Create method for obtaining UTC-prepared Postgres client.
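
The helper added below hands back a tokio_postgres::Client whose session time zone has already been set to UTC, so every timestamp read through it is interpreted consistently regardless of the database server's default zone. A minimal usage sketch (not part of this patch; the function name and query are illustrative only):

// Illustrative sketch only: callers elsewhere in the crate can obtain a UTC-prepared client.
async fn example_caller() {
	let client = crate::connect_to_db().await;
	// The session time zone is already UTC, so the `timestamptz` returned by now()
	// needs no further conversion on the Rust side.
	let row = client.query_one("SELECT now()", &[]).await.unwrap();
	let now: std::time::SystemTime = row.get(0);
	println!("current UTC time: {:?}", now);
}
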
[rapid-gossip-sync-server] src/lib.rs
index 73d3550119f1aa7b6b2caf3ae5f55b0f6b33cade..550ed79ddcdac4d56f65e93e5ac94f7fa030bfa7 100644
@@ -20,6 +20,7 @@ use lightning::routing::gossip::{NetworkGraph, NodeId};
 use lightning::util::logger::Logger;
 use lightning::util::ser::{ReadableArgs, Writeable};
 use tokio::sync::mpsc;
+use tokio_postgres::{Client, NoTls};
 use crate::lookup::DeltaSet;
 
 use crate::persistence::GossipPersister;
@@ -88,7 +89,7 @@ impl<L: Deref + Clone + Send + Sync + 'static> RapidSyncProcessor<L> where L::Ta
                let (sync_completion_sender, mut sync_completion_receiver) = mpsc::channel::<()>(1);
 
                if config::DOWNLOAD_NEW_GOSSIP {
-                       let (mut persister, persistence_sender) = GossipPersister::new(Arc::clone(&self.network_graph));
+                       let (mut persister, persistence_sender) = GossipPersister::new(self.network_graph.clone(), self.logger.clone());
 
                        log_info!(self.logger, "Starting gossip download");
                        tokio::spawn(tracking::download_gossip(persistence_sender, sync_completion_sender,
@@ -110,6 +111,20 @@ impl<L: Deref + Clone + Send + Sync + 'static> RapidSyncProcessor<L> where L::Ta
        }
 }
 
+pub(crate) async fn connect_to_db() -> Client {
+       let connection_config = config::db_connection_config();
+       let (client, connection) = connection_config.connect(NoTls).await.unwrap();
+
+       tokio::spawn(async move {
+               if let Err(e) = connection.await {
+                       panic!("connection error: {}", e);
+               }
+       });
+
+       client.execute("set time zone UTC", &[]).await.unwrap();
+       client
+}
+
 /// This method generates a no-op blob that can be used as a delta where none exists.
 ///
 /// The primary purpose of this method is the scenario of a client retrieving and processing a
@@ -141,17 +156,11 @@ fn serialize_empty_blob(current_timestamp: u64) -> Vec<u8> {
        blob
 }
 
-async fn serialize_delta<L: Deref>(network_graph: Arc<NetworkGraph<L>>, last_sync_timestamp: u32, logger: L) -> SerializedResponse where L::Target: Logger {
-       let (client, connection) = lookup::connect_to_db().await;
+async fn serialize_delta<L: Deref + Clone>(network_graph: Arc<NetworkGraph<L>>, last_sync_timestamp: u32, logger: L) -> SerializedResponse where L::Target: Logger {
+       let client = connect_to_db().await;
 
        network_graph.remove_stale_channels_and_tracking();
 
-       tokio::spawn(async move {
-               if let Err(e) = connection.await {
-                       panic!("connection error: {}", e);
-               }
-       });
-
        let mut output: Vec<u8> = vec![];
 
        // set a flag if the chain hash is prepended
@@ -175,11 +184,11 @@ async fn serialize_delta<L: Deref>(network_graph: Arc<NetworkGraph<L>>, last_syn
        };
 
        let mut delta_set = DeltaSet::new();
-       lookup::fetch_channel_announcements(&mut delta_set, network_graph, &client, last_sync_timestamp).await;
+       lookup::fetch_channel_announcements(&mut delta_set, network_graph, &client, last_sync_timestamp, logger.clone()).await;
        log_info!(logger, "announcement channel count: {}", delta_set.len());
-       lookup::fetch_channel_updates(&mut delta_set, &client, last_sync_timestamp).await;
+       lookup::fetch_channel_updates(&mut delta_set, &client, last_sync_timestamp, logger.clone()).await;
        log_info!(logger, "update-fetched channel count: {}", delta_set.len());
-       lookup::filter_delta_set(&mut delta_set);
+       lookup::filter_delta_set(&mut delta_set, logger.clone());
        log_info!(logger, "update-filtered channel count: {}", delta_set.len());
        let serialization_details = serialization::serialize_delta_set(delta_set, last_sync_timestamp);
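
A note on the lookup call sites above: fetch_channel_announcements, fetch_channel_updates, and filter_delta_set now receive the logger by value, which is why serialize_delta's bound gains `Clone`. Their definitions live in src/lookup.rs and are outside this patch; a rough sketch of the assumed shape, for orientation only:

// Assumed signature only; the real definition is in src/lookup.rs and is not shown in this patch.
pub(super) async fn fetch_channel_updates<L: std::ops::Deref>(
	delta_set: &mut DeltaSet,
	client: &tokio_postgres::Client,
	last_sync_timestamp: u32,
	logger: L,
) where L::Target: lightning::util::logger::Logger {
	// ... fetch channel updates newer than last_sync_timestamp, logging progress via `logger` ...
}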