diff --git a/Cargo.lock b/Cargo.lock index 0815fec0..1bde1199 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1129,6 +1129,12 @@ dependencies = [ "termcolor", ] +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + [[package]] name = "erased-serde" version = "0.3.23" @@ -1403,7 +1409,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap", + "indexmap 1.9.1", "slab", "tokio", "tokio-util", @@ -1428,6 +1434,12 @@ dependencies = [ "ahash", ] +[[package]] +name = "hashbrown" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" + [[package]] name = "headers" version = "0.3.8" @@ -1688,6 +1700,16 @@ dependencies = [ "hashbrown 0.12.3", ] +[[package]] +name = "indexmap" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +dependencies = [ + "equivalent", + "hashbrown 0.14.0", +] + [[package]] name = "indicatif" version = "0.16.2" @@ -2699,7 +2721,7 @@ dependencies = [ [[package]] name = "pyth-agent" -version = "1.4.0" +version = "2.0.0" dependencies = [ "anyhow", "async-trait", @@ -2736,6 +2758,7 @@ dependencies = [ "tokio-retry", "tokio-stream", "tokio-util", + "toml_edit", "tracing", "typed-html", "warp", @@ -3513,7 +3536,7 @@ version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b" dependencies = [ - "indexmap", + "indexmap 1.9.1", "ryu", "serde", "yaml-rust", @@ -3855,7 +3878,7 @@ dependencies = [ "enum_dispatch", "futures", "futures-util", - "indexmap", + "indexmap 1.9.1", "indicatif", "itertools 0.10.3", "jsonrpc-core", @@ -4241,7 +4264,7 @@ dependencies = [ "crossbeam-channel", 
"futures-util", "histogram", - "indexmap", + "indexmap 1.9.1", "itertools 0.10.3", "libc", "log", @@ -4834,6 +4857,23 @@ dependencies = [ "serde", ] +[[package]] +name = "toml_datetime" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" + +[[package]] +name = "toml_edit" +version = "0.19.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f8751d9c1b03c6500c387e96f81f815a4f8e72d142d2d4a9ffa6fedd51ddee7" +dependencies = [ + "indexmap 2.0.0", + "toml_datetime", + "winnow", +] + [[package]] name = "tower-service" version = "0.3.2" @@ -5383,6 +5423,15 @@ version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" +[[package]] +name = "winnow" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81fac9742fd1ad1bd9643b991319f72dd031016d44b77039a26977eb667141e7" +dependencies = [ + "memchr", +] + [[package]] name = "winreg" version = "0.10.1" diff --git a/Cargo.toml b/Cargo.toml index 7a31b7c2..60a4cc42 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,12 +1,16 @@ [package] name = "pyth-agent" -version = "1.4.0" +version = "2.0.0" edition = "2021" [[bin]] name = "agent" path = "src/bin/agent.rs" +[[bin]] +name = "agent-migrate-config" +path = "src/bin/agent_migrate_config.rs" + [dependencies] anyhow = "1.0.55" serde = { version = "1.0.136", features = ["derive"] } @@ -46,6 +50,7 @@ typed-html = { git = "https://github.com/bodil/typed-html", rev = "4c13ecca" } humantime = "2.1.0" prometheus-client = "0.19.0" lazy_static = "1.4.0" +toml_edit = "0.19.13" [dev-dependencies] tokio-util = { version = "0.7.0", features = ["full"] } diff --git a/README.md b/README.md index 1f74b9f3..7ed28d4b 100644 --- a/README.md +++ b/README.md @@ -32,24 +32,46 @@ The logging level can be configured at runtime 
through the `RUST_LOG` environment variable using the standard `error|warn|info|debug|trace` levels. -### Key Store -If you already have a key store set up, you can skip this step. If you haven't, you will need to create one before publishing data. A key store contains the cryptographic keys needed to publish data. Once you have a key store set up, please ensure that the configuration file mentioned above contains the correct path to your key store. +### Key Store Config Migration [v1.x.x LEGACY] +Pyth agent v2.0.0 introduces a simplified program and mapping key configuration. This breaking change alters how you define program/mapping key options in your agent config: +```toml +# Old v1.x.x way +[primary network] +key_store.root_path = "/path/to/keystore" +key_store.publish_keypair_path = "publish_key_pair.json" # Relative path from root_path, "publish_key_pair.json" by default +key_store.program_key_path = "program_key.json" # Relative path from root_path, "program_key.json" by default +key_store.mapping_key_path = "mapping_key.json" # Relative path from root_path, "mapping_key.json" by default + +# [...] + +# New v2.0.0 way +[primary_network] +key_store.publish_keypair_path = "/path/to/keypair.json" # The root_path is gone, we specify the full path +# Not using separate files anymore +key_store.program_key = "LiteralProgramPubkeyInsideTheConfig" # contents of legacy program_key.json; +key_store.mapping_key = "LiteralMappingPubkeyInsideTheConfig" # contents of legacy mapping_key.json + +# [...] -```bash -# Install the Solana Tool Suite, needed for creating the key used to sign your transactions. -sh -c "$(curl -sSfL https://release.solana.com/v1.14.13/install)" +``` -# Create the key store directory. This can be any location that is convenient for you. 
-PYTH_KEY_STORE=$HOME/.pythd +#### Automatic Migration +If you are upgrading to agent v2.0.0 with an existing config, you can use the provided automatic migrator program: +```shell +# Build +$ cargo build --release +# Run the migrator, making sure that the key store with previous keys is reachable +$ target/release/agent-migrate-config -c <existing_config_path>.toml > my_new_config.toml +``` -# Create your keypair (pair of private/public keys) that will be used to sign your transactions. -# Pyth Network will need to permission this key, so reach out to us once you have created it. -solana-keygen new --no-bip39-passphrase --outfile $PYTH_KEY_STORE/publish_key_pair.json +#### `Could not open {mapping|program|...} key file` +This error can appear if some of your program/mapping/publish key +files are not reachable under their `key_store.*` setting values. -# Initialize the key store with the public keys of the Pyth Oracle Program on the network you wish to publish to. -PYTH_KEY_ENV=devnet # Can be devnet, testnet or mainnet -./scripts/init_key_store.sh $PYTH_KEY_ENV $PYTH_KEY_STORE -``` +Ensure that your current working directory is correct for reaching the +key store path inside your config. You may also migrate manually by +changing `key_store.*_key_path` and `key_store.publish_keypair_path` +options by hand, as described in the config example above. ## Run `cargo run --release -- --config <config_file_path>` will build and run the agent in a single step. diff --git a/config/config.sample.pythnet.toml b/config/config.sample.pythnet.toml index 7bd8309e..04ae619b 100644 --- a/config/config.sample.pythnet.toml +++ b/config/config.sample.pythnet.toml @@ -11,8 +11,17 @@ rpc_url = "http://api.pythnet.pyth.network:8899" # This can be omitted when oracle.subscriber_enabled is set to false. wss_url = "ws://api.pythnet.pyth.network:8900" -# Path to the key store. +# Path to your publishing keypair. 
+key_store.publish_keypair_path = "/path/to/keypair.json" + +# Oracle program pubkey +key_store.program_key = "FsJ3A3u2vn5cTVofAjvy6y5kwABJAqYWpe4975bi2epH" + +# Oracle mapping pubkey +key_store.mapping_key = "AHtgzX45WTKfkPG53L6WYhGEXwQkN1BVknET3sVsLL8J" + +# Pythnet accumulator key +key_store.accumulator_key = "7Vbmv1jt4vyuqBZcpYPpnVhrqVe5e6ZPb6JxDcffRHUM" # Duration of the interval at which to publish updates exporter.publish_interval_duration = "400ms" @@ -25,8 +34,14 @@ exporter.publish_interval_duration = "400ms" rpc_url = "https://api.mainnet-beta.solana.com" wss_url = "wss://api.mainnet-beta.solana.com" -# Path to the key store. -key_store.root_path = "/path/to/keystore" +# Path to your publishing keypair. +key_store.publish_keypair_path = "/path/to/keypair.json" + +# Oracle program pubkey +key_store.program_key = "FsJ3A3u2vn5cTVofAjvy6y5kwABJAqYWpe4975bi2epH" + +# Oracle mapping pubkey +key_store.mapping_key = "AHtgzX45WTKfkPG53L6WYhGEXwQkN1BVknET3sVsLL8J" # Duration of the interval at which to publish updates. Default interval is 1 seconds. # exporter.publish_interval_duration = "1s" diff --git a/config/config.sample.pythtest.toml b/config/config.sample.pythtest.toml index 241e2b23..2674801e 100644 --- a/config/config.sample.pythtest.toml +++ b/config/config.sample.pythtest.toml @@ -11,8 +11,19 @@ rpc_url = "https://api.pythtest.pyth.network" # This can be omitted when oracle.subscriber_enabled is set to false. wss_url = "wss://api.pythtest.pyth.network" -# Path to the key store. -key_store.root_path = "/path/to/keystore" +# Path to your publishing keypair. 
+key_store.publish_keypair_path = "/path/to/keypair.json" + +# Oracle program pubkey +key_store.program_key = "8tfDNiaEyrV6Q1U4DEXrEigs9DoDtkugzFbybENEbCDz" # conformance +# key_store.program_key = "gSbePebfvPy7tRqimPoVecS2UsBvYv46ynrzWocc92s" # cross-chain + +# Oracle mapping pubkey +key_store.mapping_key = "AFmdnt9ng1uVxqCmqwQJDAYC5cKTkw8gJKSM5PnzuF6z" # conformance +# key_store.mapping_key = "BmA9Z6FjioHJPpjT39QazZyhDRUdZy2ezwx4GiDdE2u2" # cross-chain + +# Pythtest accumulator key (only for the cross-chain oracle) +# key_store.accumulator_key = "7Vbmv1jt4vyuqBZcpYPpnVhrqVe5e6ZPb6JxDcffRHUM" # Duration of the interval at which to publish updates exporter.publish_interval_duration = "400ms" @@ -25,8 +36,14 @@ exporter.publish_interval_duration = "400ms" rpc_url = "https://api.testnet.solana.com" wss_url = "wss://api.testnet.solana.com" -# Path to the key store. -key_store.root_path = "/path/to/keystore" +# Path to your publishing keypair. +key_store.publish_keypair_path = "/path/to/keypair.json" + +# Oracle program pubkey +key_store.program_key = "8tfDNiaEyrV6Q1U4DEXrEigs9DoDtkugzFbybENEbCDz" + +# Oracle mapping pubkey +key_store.mapping_key = "AFmdnt9ng1uVxqCmqwQJDAYC5cKTkw8gJKSM5PnzuF6z" # Duration of the interval at which to publish updates. Default interval is 1 seconds. # exporter.publish_interval_duration = "1s" diff --git a/config/config.toml b/config/config.toml index 388944b8..3ad19281 100644 --- a/config/config.toml +++ b/config/config.toml @@ -1,6 +1,9 @@ # Configuration for the JRPC API Websocket Server [pythd_api_server] -# The address on which the websocket API server will listen on. +# The address on which the websocket API server will listen. +# +# NOTE: non-loopback addresses must be used carefully, making sure the +# connection is not exposed for unauthorized access. listen_address = "127.0.0.1:8910" # Configuration for the primary network this agent will publish data to. In most cases this should be a Pythnet endpoint. 
@@ -16,19 +19,32 @@ rpc_url = "https://api.pythtest.pyth.network" # Note that api.pythtest.pyth.network is a private endpoint: please contact us for access. wss_url = "wss://api.pythtest.pyth.network" -# Path to the key store. -key_store.root_path = "/path/to/keystore" +# Path to the keypair used to publish price updates. If set to a +# non-existent file path, the system expects a keypair to be loaded +# via the remote keypair loader. If the path is valid, the remote +# keypair loading is disabled. +key_store.publish_keypair_path = "/path/to/keypair.json" + +# Public key of the oracle program +key_store.program_key = "RelevantOracleProgramAddress" + +# Public key of the root mapping account +key_store.mapping_key = "RelevantOracleMappingAddress" + +# Optional public key of the accumulator program (if provided) +key_store.accumulator_key = "RelevantOracleAccumulatorAddress" ### Optional fields ### # [metrics_server] # # Where to serve the quick-access dashboard and metrics. Metrics live under "/metrics" +# NOTE: non-loopback addresses must be used carefully, making sure the +# connection is not exposed for unauthorized access. # bind_address = "127.0.0.1:8888" # [remote_keypair_loader} # Where to serve the remote keypair loading endpoint, under "/primary/load_keypair" and "/secondary/load_keypair" -# # NOTE: non-loopback addresses must be used carefully, making sure the # connection is not exposed for unauthorized access. 
# bind_address = "127.0.0.1:9001" diff --git a/integration-tests/agent_conf.toml b/integration-tests/agent_conf.toml index 6e252cf8..13bce4a5 100644 --- a/integration-tests/agent_conf.toml +++ b/integration-tests/agent_conf.toml @@ -2,6 +2,5 @@ bind_address="0.0.0.0:8888" [primary_network] -key_store.root_path = "keystore" oracle.poll_interval_duration = "1s" exporter.transaction_monitor.poll_interval_duration = "1s" diff --git a/integration-tests/tests/test_integration.py b/integration-tests/tests/test_integration.py index 71c4e39e..e7d4f151 100644 --- a/integration-tests/tests/test_integration.py +++ b/integration-tests/tests/test_integration.py @@ -398,13 +398,25 @@ async def initialize_message_buffer_program(self, funding_keypair, sync_key_path await provider.send(tx, [parsed_funding_keypair]) @pytest.fixture - def agent_config(self, agent_keystore, tmp_path): + def agent_config(self, agent_keystore, agent_keystore_path, tmp_path): with open("agent_conf.toml") as config_file: agent_config = config_file.read() + publish_keypair_path = os.path.join(agent_keystore_path, "publish_key_pair.json") + + mapping_keypair = Keypair.from_secret_key(MAPPING_KEYPAIR) + + agent_config += f""" +key_store.publish_keypair_path = "{publish_keypair_path}" +key_store.program_key = "{ORACLE_PROGRAM}" +key_store.mapping_key = "{mapping_keypair.public_key}" +""" + # Add accumulator setting if option is enabled if USE_ACCUMULATOR: - agent_config += '\nkey_store.accumulator_key_path = "accumulator_program_key.json"' + agent_config += f'\nkey_store.accumulator_key = "{MESSAGE_BUFFER_PROGRAM}"' + + LOGGER.debug(f"Built agent config:\n{agent_config}") path = os.path.join(tmp_path, "agent_conf.toml") @@ -413,12 +425,35 @@ def agent_config(self, agent_keystore, tmp_path): return path + @pytest.fixture + def agent_legacy_config(self, agent_keystore, agent_keystore_path, tmp_path): + """ + Prepares a legacy v1.x.x config for testing agent-migrate-config + """ + with open("agent_conf.toml") as 
config_file: + agent_config = config_file.read() + + agent_config += f'\nkey_store.root_path = "{agent_keystore_path}"' + + if USE_ACCUMULATOR: + # Add accumulator setting to verify that it is inlined as well + agent_config += f'\nkey_store.accumulator_key_path = "accumulator_program_key.json"' + + LOGGER.debug(f"Built legacy agent config:\n{agent_config}") + + path = os.path.join(tmp_path, "agent_conf_legacy.toml") + + with open(path, 'w') as f: + f.write(agent_config) + + return path + @pytest.fixture def agent(self, sync_accounts, agent_keystore, tmp_path, initialize_message_buffer_program, agent_config): LOGGER.debug("Building agent binary") - self.run("cargo build --release") + self.run("cargo build --release --bin agent") log_dir = os.path.join(tmp_path, "agent_logs") LOGGER.debug("Launching agent logging to %s", log_dir) @@ -437,7 +472,7 @@ def agent_hotload(self, sync_accounts, agent_keystore, agent_keystore_path, tmp_ os.remove(os.path.join(agent_keystore_path, "publish_key_pair.json")) LOGGER.debug("Building hotload agent binary") - self.run("cargo build --release") + self.run("cargo build --release --bin agent") log_dir = os.path.join(tmp_path, "agent_logs") LOGGER.debug("Launching hotload agent logging to %s", log_dir) @@ -455,6 +490,10 @@ async def client(self, agent): yield client await client.close() + @pytest_asyncio.fixture + async def client_no_spawn(self): + return PythAgentClient(address="ws://localhost:8910") + @pytest_asyncio.fixture async def client_hotload(self, agent_hotload): client = PythAgentClient(address="ws://localhost:8910") @@ -462,6 +501,16 @@ async def client_hotload(self, agent_hotload): yield client await client.close() + @pytest.fixture + def agent_migrate_config_binary(self): + LOGGER.debug("Building agent-migrate-config binary") + self.run("cargo build --release --bin agent-migrate-config") + + os.environ["RUST_BACKTRACE"] = "full" + os.environ["RUST_LOG"] = "debug" + + return 
os.path.abspath("../target/release/agent-migrate-config") + class TestUpdatePrice(PythTest): @@ -605,7 +654,7 @@ async def test_update_price_discards_unpermissioned(self, client: PythAgentClien assert final_price_account_unperm["conf"] == 0 assert final_price_account_unperm["status"] == "unknown" - # Confirm agent logs contain the relevant WARN log + # Confirm agent logs contain the relevant log with open(f"{tmp_path}/agent_logs/stdout") as f: contents = f.read() lines_found = 0 @@ -645,3 +694,41 @@ async def test_publish_forever(self, client: PythAgentClient, tmp_path): # Send an "update_price" request await client.update_price(price_account, 47, 2, "trading") time.sleep(1) + + @pytest.mark.asyncio + async def test_agent_migrate_config(self, + agent_keystore, + agent_legacy_config, + agent_migrate_config_binary, + client_no_spawn: PythAgentClient, + initialize_message_buffer_program, + sync_accounts, + tmp_path, + ): + os.environ["RUST_BACKTRACE"] = "full" + os.environ["RUST_LOG"] = "debug" + + # Migrator must run successfully (run() raises on error) + new_config = self.run(f"{agent_migrate_config_binary} -c {agent_legacy_config}").stdout.strip() + + LOGGER.debug(f"Successfully migrated legacy config to:\n{new_config}") + + # Overwrite legacy config with the migrated version. 
+ # + # NOTE: assumes 'w' erases the file before access + with open(agent_legacy_config, 'w') as f: + f.write(new_config) + f.flush() + + self.run("cargo build --release --bin agent") + + log_dir = os.path.join(tmp_path, "agent_logs") + + # We start the agent manually to pass it the updated legacy config + with self.spawn(f"../target/release/agent --config {agent_legacy_config}", log_dir=log_dir): + time.sleep(3) + await client_no_spawn.connect() + + # Continue with the simple test case, which must succeed + await self.test_update_price_simple(client_no_spawn) + await client_no_spawn.close() diff --git a/src/agent.rs b/src/agent.rs index f326922e..afc9439e 100644 --- a/src/agent.rs +++ b/src/agent.rs @@ -219,15 +219,19 @@ pub mod config { }; /// Configuration for all components of the Agent - #[derive(Default, Deserialize, Debug)] - #[serde(default)] + #[derive(Deserialize, Debug)] pub struct Config { + #[serde(default)] pub channel_capacities: ChannelCapacities, pub primary_network: network::Config, pub secondary_network: Option<network::Config>, + #[serde(default)] pub pythd_adapter: pythd::adapter::Config, + #[serde(default)] pub pythd_api_server: pythd::api::rpc::Config, + #[serde(default)] pub metrics_server: metrics::Config, + #[serde(default)] pub remote_keypair_loader: remote_keypair_loader::Config, } diff --git a/src/agent/solana.rs b/src/agent/solana.rs index b83d2fc6..86ed9ff7 100644 --- a/src/agent/solana.rs +++ b/src/agent/solana.rs @@ -36,38 +36,40 @@ pub mod network { }, }; + pub fn default_rpc_url() -> String { + "http://localhost:8899".to_string() + } + + pub fn default_wss_url() -> String { + "ws://localhost:8900".to_string() + } + + pub fn default_rpc_timeout() -> Duration { + Duration::from_secs(10) + } + /// Configuration for a network #[derive(Clone, Serialize, Deserialize, Debug)] - #[serde(default)] pub struct Config { /// HTTP RPC endpoint + #[serde(default = "default_rpc_url")] pub rpc_url: String, /// WSS RPC endpoint + #[serde(default = 
"default_wss_url")] pub wss_url: String, /// Timeout for the requests to the RPC - #[serde(with = "humantime_serde")] + #[serde(with = "humantime_serde", default = "default_rpc_timeout")] pub rpc_timeout: Duration, /// Keystore pub key_store: key_store::Config, /// Configuration for the Oracle reading data from this network + #[serde(default)] pub oracle: oracle::Config, /// Configuration for the Exporter publishing data to this network + #[serde(default)] pub exporter: exporter::Config, } - impl Default for Config { - fn default() -> Self { - Self { - rpc_url: "http://localhost:8899".to_string(), - wss_url: "ws://localhost:8900".to_string(), - rpc_timeout: Duration::from_secs(10), - key_store: Default::default(), - oracle: Default::default(), - exporter: Default::default(), - } - } - } - pub fn spawn_network( config: Config, local_store_tx: Sender, @@ -116,8 +118,11 @@ mod key_store { Result, }, serde::{ + de::Error, Deserialize, + Deserializer, Serialize, + Serializer, }, slog::Logger, solana_sdk::{ @@ -136,34 +141,31 @@ mod key_store { }; #[derive(Clone, Serialize, Deserialize, Debug)] - #[serde(default)] pub struct Config { - /// Root directory of the KeyStore - pub root_path: PathBuf, - /// Path to the keypair used to publish price updates, - /// relative to the root. If set to a non-existent file path, - /// the system expects a keypair to be loaded via the remote - /// keypair loader. If the path is valid, the remote keypair - /// loading is disabled. + /// Path to the keypair used to publish price updates. If set + /// to a non-existent file path, the system expects a keypair + /// to be loaded via the remote keypair loader. If the path is + /// valid, the remote keypair loading is disabled. 
pub publish_keypair_path: PathBuf, - /// Path to the public key of the Oracle program, relative to the root - pub program_key_path: PathBuf, - /// Path to the public key of the root mapping account, relative to the root - pub mapping_key_path: PathBuf, - /// Path to the public key of the accumulator program, relative to the root. - pub accumulator_key_path: Option, - } - - impl Default for Config { - fn default() -> Self { - Self { - root_path: Default::default(), - publish_keypair_path: "publish_key_pair.json".into(), - program_key_path: "program_key.json".into(), - mapping_key_path: "mapping_key.json".into(), - accumulator_key_path: None, - } - } + /// The public key of the Oracle program + #[serde( + serialize_with = "pubkey_string_ser", + deserialize_with = "pubkey_string_de" + )] + pub program_key: Pubkey, + /// The public key of the root mapping account + #[serde( + serialize_with = "pubkey_string_ser", + deserialize_with = "pubkey_string_de" + )] + pub mapping_key: Pubkey, + /// The public key of the accumulator program. + #[serde( + serialize_with = "opt_pubkey_string_ser", + deserialize_with = "opt_pubkey_string_de", + default + )] + pub accumulator_key: Option, } pub struct KeyStore { @@ -181,41 +183,59 @@ mod key_store { impl KeyStore { pub fn new(config: Config, logger: &Logger) -> Result { - let full_keypair_path = config.root_path.join(config.publish_keypair_path); - - let publish_keypair = match keypair::read_keypair_file(&full_keypair_path) { + let publish_keypair = match keypair::read_keypair_file(&config.publish_keypair_path) { Ok(k) => Some(k), Err(e) => { warn!(logger, "Reading publish keypair returned an error. 
Waiting for a remote-loaded key before publishing."; - "full_keypair_path" => full_keypair_path.to_str(), "error" => e.to_string()); + "publish_keypair_path" => config.publish_keypair_path.display(), "error" => e.to_string()); None } }; - let accumulator_key: Option = - if let Some(key_path) = config.accumulator_key_path { - Some( - Self::pubkey_from_path(config.root_path.join(key_path)) - .context("Reading accumulator key")?, - ) - } else { - None - }; - Ok(KeyStore { publish_keypair, - program_key: Self::pubkey_from_path(config.root_path.join(config.program_key_path)) - .context("reading program key")?, - mapping_key: Self::pubkey_from_path(config.root_path.join(config.mapping_key_path)) - .context("reading mapping key")?, - accumulator_key, + program_key: config.program_key, + mapping_key: config.mapping_key, + accumulator_key: config.accumulator_key, }) } + } + + // Helper methods for stringified SOL addresses + + fn pubkey_string_ser(k: &Pubkey, ser: S) -> Result + where + S: Serializer, + { + ser.serialize_str(&k.to_string()) + } + + fn pubkey_string_de<'de, D>(de: D) -> Result + where + D: Deserializer<'de>, + { + let pubkey_string = String::deserialize(de)?; + let pubkey = Pubkey::from_str(&pubkey_string).map_err(D::Error::custom)?; + Ok(pubkey) + } + + fn opt_pubkey_string_ser(k_opt: &Option, ser: S) -> Result + where + S: Serializer, + { + let k_str_opt = (*k_opt).map(|k| k.to_string()); + + Option::::serialize(&k_str_opt, ser) + } - fn pubkey_from_path(path: impl AsRef) -> Result { - let contents = fs::read_to_string(path)?; - Pubkey::from_str(contents.trim()).map_err(|e| e.into()) + fn opt_pubkey_string_de<'de, D>(de: D) -> Result, D::Error> + where + D: Deserializer<'de>, + { + match Option::::deserialize(de)? 
{ + Some(k) => Ok(Some(Pubkey::from_str(&k).map_err(D::Error::custom)?)), + None => Ok(None), } } } diff --git a/src/bin/agent_migrate_config.rs b/src/bin/agent_migrate_config.rs new file mode 100644 index 00000000..a24a63af --- /dev/null +++ b/src/bin/agent_migrate_config.rs @@ -0,0 +1,213 @@ +use { + anyhow::{ + anyhow, + Context, + Result, + }, + clap::Parser, + solana_sdk::pubkey::Pubkey, + std::{ + fs::File, + io::{ + Read, + Write, + }, + path::PathBuf, + str::FromStr, + }, + toml_edit::{ + value, + Document, + Item, + }, +}; + +#[derive(Parser, Debug)] +#[command(author, version, about = "1.x.x -> 2.0.0 pyth-agent config migrator")] +struct Args { + /// Config path to be migrated + #[arg(short, long)] + config: PathBuf, +} + +pub fn main() -> Result<()> { + let args = Args::parse(); + + eprintln!("Loading old config from {}", args.config.display()); + + let mut f = File::open(args.config).context("Could not open the config file")?; + + let mut old_cfg_contents = String::new(); + + f.read_to_string(&mut old_cfg_contents)?; + + let mut doc: Document = old_cfg_contents + .parse() + .context("Could not parse config file contents as TOML")?; + + let primary_network = doc + .get_mut("primary_network") + .ok_or_else(|| anyhow::anyhow!("Could not read mandatory primary_network section"))?; + + eprint!("Migrating primary_network..."); + std::io::stderr().flush()?; + migrate_network(primary_network)?; + eprintln!("OK"); + + if let Some(secondary_network) = doc.get_mut("secondary_network") { + eprint!("Migrating secondary_network..."); + std::io::stderr().flush()?; + migrate_network(secondary_network)?; + eprintln!("OK"); + } else { + eprintln!("secondary_network not defined, moving on"); + } + + eprintln!("Migration OK. Result:"); + std::io::stderr().flush()?; + + println!("{}", doc.to_string()); + + Ok(()) +} + +/// Generalized migration routine for primary/secondary_network TOML +/// sections. 
v1.x.x defaults are supplied if unspecified in order to +/// reach the file-based pubkeys on disk. +pub fn migrate_network(network_config: &mut Item) -> Result<()> { + // Retrieve all key store (sub)paths or supply defaults + let key_store_root_path: PathBuf = { + let root_item = network_config + .get("key_store") + .and_then(|ks| ks.get("root_path")) + .cloned() + // v1.4.0 used PathBuf::default(), meaning current working directory, if unspecified. + .unwrap_or(value(".")); + + let root_str = root_item + .as_str() + .ok_or(anyhow!("Could not parse key_store.root_path"))?; + + PathBuf::from(root_str.to_owned()) + }; + + let publish_keypair_relpath: PathBuf = { + let publish_item = network_config + .get("key_store") + .and_then(|ks| ks.get("publish_keypair_path")) + .cloned() + .unwrap_or(value("publish_key_pair.json")); + + let publish_str = publish_item + .as_str() + .ok_or(anyhow!("Could not parse key_store.publish_keypair"))?; + + PathBuf::from(publish_str) + }; + + let program_key_relpath: PathBuf = { + let program_item = network_config + .get("key_store") + .and_then(|ks| ks.get("program_key_path")) + .cloned() + .unwrap_or(value("program_key.json")); + + let program_str = program_item + .as_str() + .ok_or(anyhow!("Could not parse key_store.program_key"))?; + + PathBuf::from(program_str) + }; + + let mapping_key_relpath: PathBuf = { + let mapping_item = network_config + .get("key_store") + .and_then(|ks| ks.get("mapping_key_path")) + .cloned() + .unwrap_or(value("mapping_key.json")); + + let mapping_str = mapping_item + .as_str() + .ok_or(anyhow!("Could not parse key_store.mapping_key"))?; + + PathBuf::from(mapping_str) + }; + + let accumulator_key_relpath: Option = { + let maybe_item = network_config + .get("key_store") + .and_then(|ks| ks.get("accumulator_key_path")); + + match maybe_item { + Some(item) => { + let item_str = item.as_str().ok_or(anyhow!( + "Could not parse existing key_store.accumulator_key_path" + ))?; + Some(PathBuf::from(item_str)) + } + 
None => None, + } + }; + + // We're done reading legacy key store values, remove the + // subsection from network config if present. + if let Some(ks_table_like) = network_config + .get_mut("key_store") + .and_then(|ks| ks.as_table_like_mut()) + { + ks_table_like.clear(); + } + + // Attach publish keypair path to legacy key store root path + let mut publish_keypair_path = key_store_root_path.clone(); + publish_keypair_path.push(publish_keypair_relpath); + + // Extract pubkeys from legacy file paths for other key store values + let mut program_key_path = key_store_root_path.clone(); + program_key_path.push(program_key_relpath); + let mut program_key_str = String::new(); + File::open(&program_key_path) + .context(format!( + "Could not open program key file at {}", + program_key_path.display() + ))? + .read_to_string(&mut program_key_str)?; + let program_key = + Pubkey::from_str(program_key_str.trim()).context("Could not parse program key")?; + + let mut mapping_key_path = key_store_root_path.clone(); + mapping_key_path.push(mapping_key_relpath); + let mut mapping_key_str = String::new(); + File::open(mapping_key_path) + .context("Could not open mapping key file")? + .read_to_string(&mut mapping_key_str)?; + let mapping_key = + Pubkey::from_str(mapping_key_str.trim()).context("Could not parse mapping key")?; + + let accumulator_key = if let Some(relpath) = accumulator_key_relpath { + let mut accumulator_key_path = key_store_root_path.clone(); + accumulator_key_path.push(relpath); + let mut accumulator_key_str = String::new(); + File::open(accumulator_key_path) + .context("Could not open accumulator key file")? 
+ .read_to_string(&mut accumulator_key_str)?; + let accumulator_key = Pubkey::from_str(accumulator_key_str.trim()) + .context("Could not parse accumulator key")?; + + Some(accumulator_key) + } else { + None + }; + + // Inline new key store pubkeys in the section + network_config["key_store"]["publish_keypair_path"] = + value(publish_keypair_path.display().to_string()); + network_config["key_store"]["program_key"] = value(program_key.to_string()); + network_config["key_store"]["mapping_key"] = value(mapping_key.to_string()); + + if let Some(k) = accumulator_key { + network_config["key_store"]["accumulator_key"] = value(k.to_string()); + } + + Ok(()) +}