Merge pull request #1452 from kate-goldenring/cloud-plugin-porting

Move Cloud commands out of Spin CLI and into a plugin
This commit is contained in:
Kate Goldenring 2023-06-07 10:40:25 -07:00 committed by GitHub
commit eb5e826dc2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
20 changed files with 610 additions and 2468 deletions

View File

@ -118,13 +118,6 @@ jobs:
env:
CARGO_INCREMENTAL: 0
- name: Fermyon Platform Integration Tests
run: |
make test-fermyon-platform
env:
RUST_LOG: spin=trace
CARGO_INCREMENTAL: 0
test-go:
name: Test Spin SDK - Go
runs-on: ubuntu-latest

View File

@ -55,7 +55,7 @@ jobs:
use-tool-cache: true
- name: Cargo Tarpaulin
run: cargo tarpaulin --follow-exec --skip-clean -t 6000 --out xml --features openssl/vendored,default,fermyon-platform
run: cargo tarpaulin --follow-exec --skip-clean -t 6000 --out xml --features openssl/vendored,default
env:
RUST_LOG: spin=trace

Cargo.lock generated
View File

@ -755,37 +755,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "cloud"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"cloud-openapi",
"mime_guess",
"reqwest",
"semver 1.0.16",
"serde",
"serde_json",
"tokio",
"tokio-util 0.7.7",
"tracing",
"uuid",
]
[[package]]
name = "cloud-openapi"
version = "0.1.0"
source = "git+https://github.com/fermyon/cloud-openapi?rev=1a7bc0316d2dd863d9090f201543215b36db7017#1a7bc0316d2dd863d9090f201543215b36db7017"
dependencies = [
"reqwest",
"serde",
"serde_derive",
"serde_json",
"serde_with",
"url",
"uuid",
]
[[package]]
name = "cmake"
version = "0.1.49"
@ -4786,34 +4755,6 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_with"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ea48c9627169d206b35905699f513f513c303ab9d964a59b44fdcf66c1d1ab7"
dependencies = [
"base64 0.13.1",
"chrono",
"hex",
"indexmap",
"serde",
"serde_json",
"serde_with_macros",
"time 0.3.20",
]
[[package]]
name = "serde_with_macros"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e6b7e52858f9f06c25e1c566bbb4ab428200cb3b30053ea09dc50837de7538b"
dependencies = [
"darling",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "sha1"
version = "0.6.1"
@ -5110,8 +5051,6 @@ dependencies = [
"cargo-target-dep",
"chrono",
"clap 3.2.24",
"cloud",
"cloud-openapi",
"comfy-table",
"ctrlc",
"dialoguer 0.10.3",

View File

@ -18,8 +18,6 @@ bindle = { workspace = true }
bytes = "1.1"
chrono = "0.4"
clap = { version = "3.2.24", features = ["derive", "env"] }
cloud = { path = "crates/cloud" }
cloud-openapi = { git = "https://github.com/fermyon/cloud-openapi", rev = "1a7bc0316d2dd863d9090f201543215b36db7017" }
comfy-table = "5.0"
ctrlc = { version = "3.2", features = ["termination"] }
dialoguer = "0.10"
@ -101,7 +99,6 @@ outbound-redis-tests = []
config-provider-tests = []
outbound-pg-tests = []
outbound-mysql-tests = []
fermyon-platform = []
[workspace]
members = [

View File

@ -48,11 +48,6 @@ test-unit:
test-integration: test-kv
RUST_LOG=$(LOG_LEVEL) cargo test --test integration --no-fail-fast -- --skip spinup_tests --skip cloud_tests --nocapture
.PHONY: test-fermyon-platform
test-fermyon-platform:
RUST_LOG=$(LOG_LEVEL) cargo test --test integration --features fermyon-platform --no-fail-fast -- integration_tests::test_dependencies --skip spinup_tests --nocapture
RUST_LOG=$(LOG_LEVEL) cargo test --test integration --features fermyon-platform --no-fail-fast -- --skip integration_tests::test_dependencies --skip spinup_tests --nocapture
.PHONY: test-spin-up
test-spin-up:
docker build -t spin-e2e-tests --build-arg BUILD_SPIN=$(E2E_BUILD_SPIN) -f $(E2E_TESTS_DOCKERFILE) .

View File

@ -1,18 +0,0 @@
[package]
name = "cloud"
version = { workspace = true }
authors = { workspace = true }
edition = { workspace = true }
[dependencies]
anyhow = "1.0"
cloud-openapi = { git = "https://github.com/fermyon/cloud-openapi", rev = "1a7bc0316d2dd863d9090f201543215b36db7017" }
mime_guess = { version = "2.0" }
reqwest = { version = "0.11", features = ["stream"] }
semver = "1.0"
serde = {version = "1.0", features = ["derive"]}
serde_json = "1.0"
tokio = { version = "1.17", features = ["full"] }
tokio-util = { version = "0.7.3", features = ["codec"] }
tracing = { workspace = true }
uuid = "1"

View File

@ -1,401 +0,0 @@
use anyhow::{Context, Result};
use cloud_openapi::{
apis::{
self,
apps_api::{api_apps_get, api_apps_id_delete, api_apps_post},
auth_tokens_api::api_auth_tokens_refresh_post,
channels_api::{
api_channels_get, api_channels_id_delete, api_channels_id_get,
api_channels_id_logs_get, api_channels_post, ApiChannelsIdPatchError,
},
configuration::{ApiKey, Configuration},
device_codes_api::api_device_codes_post,
key_value_pairs_api::api_key_value_pairs_post,
revisions_api::{api_revisions_get, api_revisions_post},
Error, ResponseContent,
},
models::{
AppItemPage, ChannelItem, ChannelItemPage, ChannelRevisionSelectionStrategy,
CreateAppCommand, CreateChannelCommand, CreateDeviceCodeCommand, CreateKeyValuePairCommand,
DeviceCodeItem, GetChannelLogsVm, RefreshTokenCommand, RegisterRevisionCommand,
RevisionItemPage, TokenInfo, UpdateEnvironmentVariableDto,
},
};
use reqwest::header;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use uuid::Uuid;
const JSON_MIME_TYPE: &str = "application/json";
pub struct Client {
configuration: Configuration,
}
#[derive(Serialize, Deserialize, Clone, Debug, Default)]
pub struct ConnectionConfig {
pub insecure: bool,
pub token: String,
pub url: String,
}
impl Client {
pub fn new(conn_info: ConnectionConfig) -> Self {
let mut headers = header::HeaderMap::new();
headers.insert(header::ACCEPT, JSON_MIME_TYPE.parse().unwrap());
headers.insert(header::CONTENT_TYPE, JSON_MIME_TYPE.parse().unwrap());
let base_path = match conn_info.url.strip_suffix('/') {
Some(s) => s.to_owned(),
None => conn_info.url,
};
let configuration = Configuration {
base_path,
user_agent: Some(format!(
"{}/{}",
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_VERSION")
)),
client: reqwest::Client::builder()
.danger_accept_invalid_certs(conn_info.insecure)
.default_headers(headers)
.build()
.unwrap(),
basic_auth: None,
oauth_access_token: None,
bearer_access_token: None,
api_key: Some(ApiKey {
prefix: Some("Bearer".to_owned()),
key: conn_info.token,
}),
};
Self { configuration }
}
pub async fn create_device_code(&self, client_id: Uuid) -> Result<DeviceCodeItem> {
api_device_codes_post(
&self.configuration,
CreateDeviceCodeCommand { client_id },
None,
)
.await
.map_err(format_response_error)
}
pub async fn login(&self, token: String) -> Result<TokenInfo> {
// When the new OpenAPI specification is released, manually crafting
// the request should no longer be necessary.
let response = self
.configuration
.client
.post(format!("{}/api/auth-tokens", self.configuration.base_path))
.body(
serde_json::json!(
{
"provider": "DeviceFlow",
"clientId": "583e63e9-461f-4fbe-a246-23e0fb1cad10",
"providerCode": token,
}
)
.to_string(),
)
.send()
.await?;
serde_json::from_reader(response.bytes().await?.as_ref())
.context("Failed to parse response")
}
pub async fn refresh_token(&self, token: String, refresh_token: String) -> Result<TokenInfo> {
api_auth_tokens_refresh_post(
&self.configuration,
RefreshTokenCommand {
token,
refresh_token,
},
None,
)
.await
.map_err(format_response_error)
}
pub async fn add_app(&self, name: &str, storage_id: &str) -> Result<Uuid> {
api_apps_post(
&self.configuration,
CreateAppCommand {
name: name.to_string(),
storage_id: storage_id.to_string(),
},
None,
)
.await
.map_err(format_response_error)
}
pub async fn remove_app(&self, id: String) -> Result<()> {
api_apps_id_delete(&self.configuration, &id, None)
.await
.map_err(format_response_error)
}
pub async fn list_apps(&self) -> Result<AppItemPage> {
api_apps_get(&self.configuration, None, None, None, None, None, None)
.await
.map_err(format_response_error)
}
pub async fn get_channel_by_id(&self, id: &str) -> Result<ChannelItem> {
api_channels_id_get(&self.configuration, id, None)
.await
.map_err(format_response_error)
}
pub async fn list_channels(&self) -> Result<ChannelItemPage> {
api_channels_get(
&self.configuration,
Some(""),
None,
None,
Some("Name"),
None,
None,
)
.await
.map_err(format_response_error)
}
pub async fn list_channels_next(&self, previous: &ChannelItemPage) -> Result<ChannelItemPage> {
api_channels_get(
&self.configuration,
Some(""),
Some(previous.page_index + 1),
Some(previous.page_size),
Some("Name"),
None,
None,
)
.await
.map_err(format_response_error)
}
pub async fn add_channel(
&self,
app_id: Uuid,
name: String,
revision_selection_strategy: ChannelRevisionSelectionStrategy,
range_rule: Option<String>,
active_revision_id: Option<Uuid>,
) -> anyhow::Result<Uuid> {
let command = CreateChannelCommand {
app_id,
name,
revision_selection_strategy,
range_rule: Some(range_rule),
active_revision_id: Some(active_revision_id),
};
api_channels_post(&self.configuration, command, None)
.await
.map_err(format_response_error)
}
pub async fn patch_channel(
&self,
id: Uuid,
name: Option<String>,
revision_selection_strategy: Option<ChannelRevisionSelectionStrategy>,
range_rule: Option<String>,
active_revision_id: Option<Uuid>,
environment_variables: Option<Vec<UpdateEnvironmentVariableDto>>,
) -> anyhow::Result<()> {
let patch_channel_command = PatchChannelCommand {
channel_id: Some(id),
name,
revision_selection_strategy,
range_rule,
active_revision_id,
environment_variables,
};
let local_var_configuration = &self.configuration;
let local_var_client = &local_var_configuration.client;
let local_var_uri_str = format!(
"{}/api/channels/{id}",
local_var_configuration.base_path,
id = apis::urlencode(id.to_string())
);
let mut local_var_req_builder =
local_var_client.request(reqwest::Method::PATCH, local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = local_var_configuration.user_agent {
local_var_req_builder = local_var_req_builder
.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = local_var_configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("Authorization", local_var_value);
};
local_var_req_builder = local_var_req_builder.json(&patch_channel_command);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
Ok(())
} else {
let local_var_entity: Option<ApiChannelsIdPatchError> =
serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent {
status: local_var_status,
content: local_var_content,
entity: local_var_entity,
};
Err(format_response_error(Error::ResponseError(local_var_error)))
}
}
pub async fn remove_channel(&self, id: String) -> Result<()> {
api_channels_id_delete(&self.configuration, &id, None)
.await
.map_err(format_response_error)
}
pub async fn channel_logs(&self, id: String) -> Result<GetChannelLogsVm> {
api_channels_id_logs_get(&self.configuration, &id, None, None)
.await
.map_err(format_response_error)
}
pub async fn add_revision(
&self,
app_storage_id: String,
revision_number: String,
) -> anyhow::Result<()> {
api_revisions_post(
&self.configuration,
RegisterRevisionCommand {
app_storage_id,
revision_number,
},
None,
)
.await
.map_err(format_response_error)
}
pub async fn list_revisions(&self) -> anyhow::Result<RevisionItemPage> {
api_revisions_get(&self.configuration, None, None, None)
.await
.map_err(format_response_error)
}
pub async fn list_revisions_next(
&self,
previous: &RevisionItemPage,
) -> anyhow::Result<RevisionItemPage> {
api_revisions_get(
&self.configuration,
Some(previous.page_index + 1),
Some(previous.page_size),
None,
)
.await
.map_err(format_response_error)
}
pub async fn add_key_value_pair(
&self,
app_id: Uuid,
store_name: String,
key: String,
value: String,
) -> anyhow::Result<()> {
api_key_value_pairs_post(
&self.configuration,
CreateKeyValuePairCommand {
app_id,
store_name,
key,
value,
},
None,
)
.await
.map_err(format_response_error)
}
}
#[derive(Deserialize, Debug)]
struct ValidationExceptionMessage {
title: String,
errors: HashMap<String, Vec<String>>,
}
#[derive(Deserialize, Debug)]
struct CloudProblemDetails {
detail: String,
}
fn format_response_error<T>(e: Error<T>) -> anyhow::Error {
match e {
Error::ResponseError(r) => {
// Validation failures are distinguished by the presence of `errors` so try that first
if let Ok(m) = serde_json::from_str::<ValidationExceptionMessage>(&r.content) {
anyhow::anyhow!("{} {:?}", m.title, m.errors)
} else if let Ok(d) = serde_json::from_str::<CloudProblemDetails>(&r.content) {
anyhow::anyhow!("{}", d.detail)
} else {
anyhow::anyhow!("response status code: {}", r.status)
}
}
Error::Serde(err) => {
anyhow::anyhow!(format!("could not parse JSON object: {}", err))
}
_ => anyhow::anyhow!(e.to_string()),
}
}
#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
pub struct PatchChannelCommand {
#[serde(rename = "channelId", skip_serializing_if = "Option::is_none")]
pub channel_id: Option<uuid::Uuid>,
#[serde(
rename = "environmentVariables",
skip_serializing_if = "Option::is_none"
)]
pub environment_variables: Option<Vec<UpdateEnvironmentVariableDto>>,
#[serde(rename = "name", skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(
rename = "revisionSelectionStrategy",
skip_serializing_if = "Option::is_none"
)]
pub revision_selection_strategy: Option<ChannelRevisionSelectionStrategy>,
#[serde(rename = "rangeRule", skip_serializing_if = "Option::is_none")]
pub range_rule: Option<String>,
#[serde(rename = "activeRevisionId", skip_serializing_if = "Option::is_none")]
pub active_revision_id: Option<uuid::Uuid>,
}
impl PatchChannelCommand {
pub fn new() -> PatchChannelCommand {
PatchChannelCommand {
channel_id: None,
environment_variables: None,
name: None,
revision_selection_strategy: None,
range_rule: None,
active_revision_id: None,
}
}
}

View File

@ -1 +0,0 @@
pub mod client;

View File

@ -0,0 +1,161 @@
title = "SIP 014 - "Cloud Plugin"
template = "main"
date = "2023-05-10T23:09:00Z"
---
Summary: This improvement proposal describes the plan to move the cloud-specific
functionality from the core Spin command-line interface to a separate plugin.
Owners: matt.fisher@fermyon.com
Created: May 10, 2023
## Background
The intended outcome of this SIP is to provide a set of recommendations to move
the core logic of `spin login` and `spin deploy` into a separate `spin cloud`
plugin, while continuing to provide a great out-of-the-box experience for those
wanting to deploy their applications to the Fermyon Cloud.
## Proposal
This document proposes to move two commands from the Spin command-line interface
to a separate plugin:
1. `spin login`
2. `spin deploy`
These commands will be moved to a separate `spin cloud` plugin. This document
will also recommend possible approaches to ensure these commands remain easily
accessible to new and existing users of the Spin command-line interface.
This enables the Spin team to release the core Spin runtime and the Spin
command-line interface as stable, while also enabling the functionality of the
Fermyon Cloud to change and iterate over time.
## Rationale
Several commands were introduced to the Spin CLI, making it very simple for
someone to deploy their application to the Fermyon Cloud:
- `spin login`
- `spin deploy`
These commands are orthogonal to the concerns of Spin's core runtime. These
commands involve the packaging and distribution of a Spin application to the
Fermyon Cloud. They are considered "adjacent" to the Spin user experience as
they do not assist the developer with writing their application, nor do they
relate to Spin's core runtime; they provide a simple experience to ship their
application to the Fermyon Cloud.
`spin login` and `spin deploy` communicate with the Fermyon Cloud API. As new
features are introduced to the Fermyon Cloud, the API may change and evolve over
time.
Building a simple, delightful on-ramp experience for the Fermyon Cloud remains
a top priority. It is especially important that we continue to provide a very
simple on-ramp experience so users can readily deploy their Spin applications to
a production-grade system.
Spin's existing plugin system allows the Spin community to add and extend the
functionality of Spin without introducing changes to the Spin command-line
interface. Plugins are described in more detail in [SIP 006 - Spin
Plugins](./006-spin-plugins.md). This allows us to ship `spin login` and `spin
deploy` as separate, discrete functionality while ensuring users can still
access that functionality through familiar tooling.
## Specification
The proposal herein suggests that we re-release the core logic of `spin login`
and `spin deploy` under a separate `spin cloud` plugin. Spin will alias the
existing `spin login` and `spin deploy` commands to their respective `spin
cloud` counterparts to retain backwards compatibility.
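As a rough sketch of how such an alias might forward its arguments (this is
illustrative only, not the PR's implementation: the real CLI routes through
Spin's plugin launcher, and the `spin-cloud` binary name used here is an
assumption):
```rust
use anyhow::{ensure, Context, Result};
use clap::Parser;
use std::process::Command;

/// Hypothetical alias: `spin deploy ...` re-invoked as `spin cloud deploy ...`.
#[derive(Parser, Debug)]
#[clap(allow_hyphen_values = true, disable_help_flag = true)]
struct DeployAlias {
    /// Everything after `deploy` is forwarded verbatim to the plugin.
    #[clap(hide = true)]
    args: Vec<String>,
}

impl DeployAlias {
    fn run(self) -> Result<()> {
        // Spawning a `spin-cloud` binary directly is a simplification; in
        // practice the CLI would dispatch through its plugin machinery.
        let status = Command::new("spin-cloud")
            .arg("deploy")
            .args(&self.args)
            .status()
            .context("failed to launch the `cloud` plugin")?;
        ensure!(status.success(), "`spin cloud deploy` failed");
        Ok(())
    }
}

fn main() -> Result<()> {
    DeployAlias::parse().run()
}
```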
When a user executes `spin login` or `spin deploy` and the cloud plugin is not
installed on their system, Spin will inform the user that the `spin cloud`
plugin has not been installed, then install the plugin.
```console
$ spin deploy
The `cloud` plugin is required. Installing now.
Plugin 'cloud' was installed successfully!
Uploading cloud_start version 0.1.0+XXXXXXXX...
Deploying...
Waiting for application to become ready... ready
Available Routes:
cloud-start: https://cloud-start-xxxxxxxx.fermyon.app/ (wildcard)
```
## Future design considerations
### `spin cloud config`
An early prototype of the `spin cloud` plugin proposed several changes to the
`spin cloud deploy` command, including a new `spin cloud config` command. The
proposed command would configure the "current" Spin app for deployment with an
interactive prompt. This command would be optional, and calling `spin cloud
deploy` on an application that was not yet configured would invoke `spin cloud
config` in the background. The experience looked something like this:
```console
$ spin cloud deploy
The current Spin app hasn't been set up for deployment yet.
Pick a new or existing deployment config:
> Fermyon Cloud - New Deployment
Fermyon Cloud - 'spicy-meatballs'
Creating new deployment...
New deployment created: 'mighty-hamster'
[...SNIP normal deploy flow...]
```
While the move to a separate `spin cloud` plugin does not reject this idea
outright, the goal of this SIP is to make the least invasive change to the
existing `spin login` and `spin deploy` experience. Further iterations of the
`spin login` and `spin deploy` experience can be addressed in later updates to
the plugin.
In fact, the move to a separate `spin cloud` plugin grants us the
flexibility to make changes to the core `spin deploy` experience without forcing
us to wait until Spin 2.0. If anything, this proposal enables us to make these
changes to the `spin cloud` plugin without waiting for a new release of Spin.
### A generic `spin cloud` plugin supporting multiple cloud providers
One recommendation was to design `spin cloud` in a generic fashion. In this
manner, "cloud providers" (including Fermyon Cloud) would integrate with `spin
cloud`. Customers would use `spin cloud login` and `spin cloud deploy` to
deploy Spin applications to their hosting platform of choice.
Rather than a generic plugin that prescribes a command flow for all clouds, we
hope partners come to us to add their own plugin for deploying Spin applications
to their cloud (and we are open to the idea of collaborating on such a
project!). For the initial launch, the goal of this SIP is to make the least
invasive change to the existing `spin login` and `spin deploy` experience.
### Will users be able to run CRUD operations on Cloud-specific objects (like a KV store)?
The goal of this SIP is to find a new home for `spin login` and `spin deploy`.
Future functionality of this kind (such as a `spin cloud kv` command) may be
provided in later updates to the plugin.
## Open Questions
> How do we ensure the `spin cloud` plugin is kept up-to-date? Do we ask the
> user to run `spin plugin update`? Do we inform them that a new version of the
> plugin is available? Do we update the plugin for the user?
The goal of this SIP is to find a new home for `spin login` and `spin deploy`.
Future iterations of the plugin system can be handled separately as a feature
enhancement. For the time being, asking the user to run `spin plugin update`
aligns with the current plugin model.
> How do we install the plugin? Install on first invocation of `spin login` or
> `spin deploy`? Install the first time the user runs a regular `spin` command?
Per the current plugin system, we will return a helpful error message when the
user attempts to run a command whose plugin is not installed. Asking the user
to run `spin plugin install` aligns with the current plugin model.
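As a minimal sketch of that behavior, assuming a hypothetical
`plugin_is_installed` helper and manifest layout (Spin's real plugin lookup
differs):
```rust
use anyhow::{bail, Result};
use std::path::Path;

/// Hypothetical check: does a manifest for `name` exist under the plugins directory?
fn plugin_is_installed(plugins_dir: &Path, name: &str) -> bool {
    plugins_dir.join("manifests").join(format!("{name}.json")).exists()
}

/// Surface a helpful error instead of a bare "unknown command" failure.
fn ensure_cloud_plugin(plugins_dir: &Path) -> Result<()> {
    if !plugin_is_installed(plugins_dir, "cloud") {
        bail!(
            "The `cloud` plugin is required for this command.\n\
             Run `spin plugin install cloud` and try again."
        );
    }
    Ok(())
}
```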

View File

@ -87,6 +87,17 @@ version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
[[package]]
name = "async-channel"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf46fee83e5ccffc220104713af3292ff9bc7c64c7de289f66dae8e38d826833"
dependencies = [
"concurrent-queue",
"event-listener",
"futures-core",
]
[[package]]
name = "async-compression"
version = "0.3.15"
@ -102,13 +113,13 @@ dependencies = [
[[package]]
name = "async-trait"
version = "0.1.63"
version = "0.1.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eff18d764974428cf3a9328e23fc5c986f5fbed46e6cd4cdf42544df5d297ec1"
checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.107",
"syn 2.0.11",
]
[[package]]
@ -128,6 +139,53 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "azure_core"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32568c56fda7f2f1173430298bddeb507ed44e99bd989ba1156a25534bff5d98"
dependencies = [
"async-trait",
"base64 0.21.0",
"bytes",
"dyn-clone",
"futures",
"getrandom 0.2.8",
"http-types",
"log",
"paste",
"pin-project",
"rand 0.8.5",
"reqwest",
"rustc_version",
"serde",
"serde_json",
"time",
"url",
"uuid",
]
[[package]]
name = "azure_data_cosmos"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73dede39a91e205b2050f250f6e31ed7c4c72be7ee694930c155c4d7636fe8e1"
dependencies = [
"async-trait",
"azure_core",
"bytes",
"futures",
"hmac",
"log",
"serde",
"serde_json",
"sha2 0.10.6",
"thiserror",
"time",
"url",
"uuid",
]
[[package]]
name = "base64"
version = "0.13.1"
@ -604,6 +662,15 @@ dependencies = [
"tokio-util 0.7.4",
]
[[package]]
name = "concurrent-queue"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62ec6771ecfa0762d24683ee5a32ad78487a3d3afdc0fb8cae19d2c5deb50b7c"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "core-foundation"
version = "0.9.3"
@ -1028,6 +1095,12 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bd4b30a6560bbd9b4620f4de34c3f14f60848e58a9b7216801afcb4c7b31c3c"
[[package]]
name = "dyn-clone"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68b0cf012f1230e43cd00ebb729c6bb58707ecfa8ad08b52ef3a4ccd2697fc30"
[[package]]
name = "ed25519"
version = "1.5.3"
@ -1100,6 +1173,12 @@ dependencies = [
"libc",
]
[[package]]
name = "event-listener"
version = "2.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
[[package]]
name = "fallible-iterator"
version = "0.2.0"
@ -1288,9 +1367,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
[[package]]
name = "futures"
version = "0.3.25"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38390104763dc37a5145a53c29c63c1290b5d316d6086ec32c293f6736051bb0"
checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40"
dependencies = [
"futures-channel",
"futures-core",
@ -1303,9 +1382,9 @@ dependencies = [
[[package]]
name = "futures-channel"
version = "0.3.25"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52ba265a92256105f45b719605a571ffe2d1f0fea3807304b522c1d778f79eed"
checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2"
dependencies = [
"futures-core",
"futures-sink",
@ -1313,15 +1392,15 @@ dependencies = [
[[package]]
name = "futures-core"
version = "0.3.25"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04909a7a7e4633ae6c4a9ab280aeb86da1236243a77b694a49eacd659a4bd3ac"
checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c"
[[package]]
name = "futures-executor"
version = "0.3.25"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7acc85df6714c176ab5edf386123fafe217be88c0840ec11f199441134a074e2"
checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0"
dependencies = [
"futures-core",
"futures-task",
@ -1330,38 +1409,53 @@ dependencies = [
[[package]]
name = "futures-io"
version = "0.3.25"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00f5fb52a06bdcadeb54e8d3671f8888a39697dcb0b81b23b55174030427f4eb"
checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964"
[[package]]
name = "futures-lite"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce"
dependencies = [
"fastrand",
"futures-core",
"futures-io",
"memchr",
"parking",
"pin-project-lite",
"waker-fn",
]
[[package]]
name = "futures-macro"
version = "0.3.25"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bdfb8ce053d86b91919aad980c220b1fb8401a9394410e1c289ed7e66b61835d"
checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.107",
"syn 2.0.11",
]
[[package]]
name = "futures-sink"
version = "0.3.25"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39c15cf1a4aa79df40f1bb462fb39676d0ad9e366c2a33b590d7c66f4f81fcf9"
checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e"
[[package]]
name = "futures-task"
version = "0.3.25"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea"
checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65"
[[package]]
name = "futures-util"
version = "0.3.25"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "197676987abd2f9cadff84926f410af1c183608d36641465df73ae8211dc65d6"
checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533"
dependencies = [
"futures-channel",
"futures-core",
@ -1541,7 +1635,7 @@ dependencies = [
[[package]]
name = "host"
version = "0.0.0"
source = "git+https://github.com/fermyon/spin-componentize?rev=51c3fade751c4e364142719e42130943fd8b0a76#51c3fade751c4e364142719e42130943fd8b0a76"
source = "git+https://github.com/fermyon/spin-componentize?rev=b6d42fe41e5690844a661deb631d996a2b49debc#b6d42fe41e5690844a661deb631d996a2b49debc"
dependencies = [
"anyhow",
"async-trait",
@ -1556,6 +1650,18 @@ dependencies = [
"wasmtime",
]
[[package]]
name = "hrana-client-proto"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26f15d50a607f7f2cb8cb97cad7ae746f861139e8ebc425a8545195a556d6102"
dependencies = [
"anyhow",
"base64 0.21.0",
"serde",
"serde_json",
]
[[package]]
name = "http"
version = "0.2.8"
@ -1578,6 +1684,26 @@ dependencies = [
"pin-project-lite",
]
[[package]]
name = "http-types"
version = "2.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e9b187a72d63adbfba487f48095306ac823049cb504ee195541e91c7775f5ad"
dependencies = [
"anyhow",
"async-channel",
"base64 0.13.1",
"futures-lite",
"infer",
"pin-project-lite",
"rand 0.7.3",
"serde",
"serde_json",
"serde_qs",
"serde_urlencoded",
"url",
]
[[package]]
name = "httparse"
version = "1.8.0"
@ -1703,6 +1829,12 @@ dependencies = [
"serde",
]
[[package]]
name = "infer"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64e9829a50b42bb782c1df523f78d332fe371b10c661e78b7a3c34b0198e9fac"
[[package]]
name = "instant"
version = "0.1.12"
@ -1924,6 +2056,24 @@ dependencies = [
"winapi",
]
[[package]]
name = "libsql-client"
version = "0.24.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8861153820a4228a1261ee92138345f7e08c71e64a75c95217247427172f2ce8"
dependencies = [
"anyhow",
"async-trait",
"base64 0.21.0",
"hrana-client-proto",
"num-traits",
"reqwest",
"serde",
"serde_json",
"tracing",
"url",
]
[[package]]
name = "libsqlite3-sys"
version = "0.26.0"
@ -2247,6 +2397,15 @@ dependencies = [
"libc",
]
[[package]]
name = "num_threads"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44"
dependencies = [
"libc",
]
[[package]]
name = "oauth2"
version = "4.3.0"
@ -2367,7 +2526,7 @@ dependencies = [
[[package]]
name = "outbound-http"
version = "1.2.0-pre0"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"http",
@ -2382,7 +2541,7 @@ dependencies = [
[[package]]
name = "outbound-mysql"
version = "1.2.0-pre0"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"mysql_async",
@ -2397,7 +2556,7 @@ dependencies = [
[[package]]
name = "outbound-pg"
version = "1.2.0-pre0"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"native-tls",
@ -2412,7 +2571,7 @@ dependencies = [
[[package]]
name = "outbound-redis"
version = "1.2.0-pre0"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"redis",
@ -2423,6 +2582,12 @@ dependencies = [
"wit-bindgen-wasmtime",
]
[[package]]
name = "parking"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e"
[[package]]
name = "parking_lot"
version = "0.11.2"
@ -3034,6 +3199,15 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustc_version"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
"semver",
]
[[package]]
name = "rustify"
version = "0.5.3"
@ -3207,9 +3381,9 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.152"
version = "1.0.163"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2"
dependencies = [
"serde_derive",
]
@ -3226,20 +3400,20 @@ dependencies = [
[[package]]
name = "serde_derive"
version = "1.0.152"
version = "1.0.163"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.107",
"syn 2.0.11",
]
[[package]]
name = "serde_json"
version = "1.0.91"
version = "1.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
dependencies = [
"itoa",
"ryu",
@ -3255,6 +3429,17 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_qs"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7715380eec75f029a4ef7de39a9200e0a63823176b759d055b613f5a87df6a6"
dependencies = [
"percent-encoding",
"serde",
"thiserror",
]
[[package]]
name = "serde_urlencoded"
version = "0.7.1"
@ -3326,6 +3511,15 @@ dependencies = [
"dirs",
]
[[package]]
name = "shellexpand"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da03fa3b94cc19e3ebfc88c4229c49d8f08cdbd1228870a45f0ffdf84988e14b"
dependencies = [
"dirs",
]
[[package]]
name = "shlex"
version = "1.1.0"
@ -3420,7 +3614,7 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
[[package]]
name = "spin-app"
version = "1.2.0-pre0"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"async-trait",
@ -3433,7 +3627,7 @@ dependencies = [
[[package]]
name = "spin-common"
version = "1.2.0-pre0"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"sha2 0.10.6",
@ -3443,7 +3637,7 @@ dependencies = [
[[package]]
name = "spin-componentize"
version = "0.1.0"
source = "git+https://github.com/fermyon/spin-componentize?rev=51c3fade751c4e364142719e42130943fd8b0a76#51c3fade751c4e364142719e42130943fd8b0a76"
source = "git+https://github.com/fermyon/spin-componentize?rev=b6d42fe41e5690844a661deb631d996a2b49debc#b6d42fe41e5690844a661deb631d996a2b49debc"
dependencies = [
"anyhow",
"wasm-encoder 0.26.0",
@ -3453,7 +3647,7 @@ dependencies = [
[[package]]
name = "spin-config"
version = "1.2.0-pre0"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"async-trait",
@ -3471,7 +3665,7 @@ dependencies = [
[[package]]
name = "spin-core"
version = "1.2.0-pre0"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"async-trait",
@ -3489,7 +3683,7 @@ dependencies = [
[[package]]
name = "spin-key-value"
version = "1.2.0-pre0"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"lru 0.9.0",
@ -3501,6 +3695,20 @@ dependencies = [
"wit-bindgen-wasmtime",
]
[[package]]
name = "spin-key-value-azure"
version = "0.1.0"
dependencies = [
"anyhow",
"azure_data_cosmos",
"futures",
"serde",
"spin-core",
"spin-key-value",
"tokio",
"url",
]
[[package]]
name = "spin-key-value-redis"
version = "0.1.0"
@ -3529,7 +3737,7 @@ dependencies = [
[[package]]
name = "spin-loader"
version = "1.2.0-pre0"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"async-trait",
@ -3549,6 +3757,7 @@ dependencies = [
"semver",
"serde",
"serde_json",
"shellexpand 3.1.0",
"spin-common",
"spin-manifest",
"tempfile",
@ -3561,7 +3770,7 @@ dependencies = [
[[package]]
name = "spin-manifest"
version = "1.2.0-pre0"
version = "1.3.0-pre0"
dependencies = [
"indexmap",
"serde",
@ -3569,9 +3778,47 @@ dependencies = [
"toml",
]
[[package]]
name = "spin-sqlite"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"spin-app",
"spin-core",
"spin-key-value",
"spin-world",
"tokio",
"wit-bindgen-wasmtime",
]
[[package]]
name = "spin-sqlite-inproc"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"once_cell",
"rand 0.8.5",
"rusqlite",
"spin-sqlite",
"spin-world",
"tokio",
]
[[package]]
name = "spin-sqlite-libsql"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"libsql-client",
"spin-sqlite",
"spin-world",
"sqlparser",
"tokio",
]
[[package]]
name = "spin-trigger"
version = "1.2.0-pre0"
version = "1.3.0-pre0"
dependencies = [
"anyhow",
"async-trait",
@ -3593,10 +3840,14 @@ dependencies = [
"spin-config",
"spin-core",
"spin-key-value",
"spin-key-value-azure",
"spin-key-value-redis",
"spin-key-value-sqlite",
"spin-loader",
"spin-manifest",
"spin-sqlite",
"spin-sqlite-inproc",
"spin-sqlite-libsql",
"tokio",
"toml",
"tracing",
@ -3606,11 +3857,20 @@ dependencies = [
[[package]]
name = "spin-world"
version = "1.2.0-pre0"
version = "1.3.0-pre0"
dependencies = [
"wasmtime",
]
[[package]]
name = "sqlparser"
version = "0.34.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37d3706eefb17039056234df6b566b0014f303f867f2656108334a55b8096f59"
dependencies = [
"log",
]
[[package]]
name = "stable_deref_trait"
version = "1.2.0"
@ -3773,6 +4033,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376"
dependencies = [
"itoa",
"libc",
"num_threads",
"serde",
"time-core",
"time-macros",
@ -4116,6 +4378,9 @@ name = "uuid"
version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "422ee0de9031b5b948b97a8fc04e3aa35230001a722ddd27943e0be31564ce4c"
dependencies = [
"getrandom 0.2.8",
]
[[package]]
name = "vaultrs"
@ -4149,6 +4414,12 @@ version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "waker-fn"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca"
[[package]]
name = "walkdir"
version = "2.3.2"
@ -4185,7 +4456,7 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasi-cap-std-sync"
version = "0.0.0"
source = "git+https://github.com/fermyon/spin-componentize?rev=51c3fade751c4e364142719e42130943fd8b0a76#51c3fade751c4e364142719e42130943fd8b0a76"
source = "git+https://github.com/fermyon/spin-componentize?rev=b6d42fe41e5690844a661deb631d996a2b49debc#b6d42fe41e5690844a661deb631d996a2b49debc"
dependencies = [
"anyhow",
"async-trait",
@ -4233,7 +4504,7 @@ dependencies = [
[[package]]
name = "wasi-common"
version = "0.0.0"
source = "git+https://github.com/fermyon/spin-componentize?rev=51c3fade751c4e364142719e42130943fd8b0a76#51c3fade751c4e364142719e42130943fd8b0a76"
source = "git+https://github.com/fermyon/spin-componentize?rev=b6d42fe41e5690844a661deb631d996a2b49debc#b6d42fe41e5690844a661deb631d996a2b49debc"
dependencies = [
"anyhow",
"async-trait",
@ -4778,7 +5049,7 @@ dependencies = [
"heck 0.4.0",
"proc-macro2",
"quote",
"shellexpand",
"shellexpand 2.1.2",
"syn 1.0.107",
"witx",
]

View File

@ -5,11 +5,9 @@ use lazy_static::lazy_static;
use spin_cli::build_info::*;
use spin_cli::commands::{
build::BuildCommand,
cloud::CloudCommands,
deploy::DeployCommand,
cloud::{CloudCommand, DeployCommand, LoginCommand},
doctor::DoctorCommand,
external::execute_external_subcommand,
login::LoginCommand,
new::{AddCommand, NewCommand},
plugins::PluginCommands,
registry::RegistryCommands,
@ -78,8 +76,7 @@ enum SpinApp {
New(NewCommand),
Add(AddCommand),
Up(UpCommand),
#[clap(subcommand)]
Cloud(CloudCommands),
Cloud(CloudCommand),
// acts as a cross-level subcommand shortcut -> `spin cloud deploy`
Deploy(DeployCommand),
// acts as a cross-level subcommand shortcut -> `spin cloud login`
@ -113,9 +110,9 @@ impl SpinApp {
Self::Up(cmd) => cmd.run().await,
Self::New(cmd) => cmd.run().await,
Self::Add(cmd) => cmd.run().await,
Self::Cloud(cmd) => cmd.run().await,
Self::Deploy(cmd) => cmd.run().await,
Self::Login(cmd) => cmd.run().await,
Self::Cloud(cmd) => cmd.run(SpinApp::command()).await,
Self::Deploy(cmd) => cmd.run(SpinApp::command()).await,
Self::Login(cmd) => cmd.run(SpinApp::command()).await,
Self::Registry(cmd) => cmd.run().await,
Self::Build(cmd) => cmd.run().await,
Self::Trigger(TriggerCommands::Http(cmd)) => cmd.run().await,

View File

@ -4,14 +4,10 @@
pub mod build;
/// Commands for publishing applications to the Fermyon Platform.
pub mod cloud;
/// Command to package and upload an application to the Fermyon Platform.
pub mod deploy;
/// Command for running the Spin Doctor.
pub mod doctor;
/// Commands for external subcommands (i.e. plugins)
pub mod external;
/// Command for logging into the Fermyon Platform.
pub mod login;
/// Command for creating a new application.
pub mod new;
/// Command for adding a plugin to Spin

View File

@ -1,24 +1,63 @@
use crate::commands::external::execute_external_subcommand;
use anyhow::Result;
use clap::Subcommand;
use clap::Args;
use super::deploy::DeployCommand;
use super::login::LoginCommand;
/// Commands for publishing applications to the Fermyon Platform.
#[derive(Subcommand, Debug)]
pub enum CloudCommands {
/// Package and upload an application to the Fermyon Platform.
Deploy(DeployCommand),
/// Log into the Fermyon Platform.
Login(LoginCommand),
#[derive(Debug, Args, PartialEq)]
#[clap(
about = "Package and upload an application to the Fermyon Platform",
allow_hyphen_values = true,
disable_help_flag = true
)]
pub struct DeployCommand {
/// All args to be passed through to the plugin
#[clap(hide = true)]
args: Vec<String>,
}
impl CloudCommands {
pub async fn run(self) -> Result<()> {
match self {
Self::Deploy(cmd) => cmd.run().await,
Self::Login(cmd) => cmd.run().await,
}
#[derive(Debug, Args, PartialEq)]
#[clap(
about = "Log into to the Fermyon Cloud.",
allow_hyphen_values = true,
disable_help_flag = true
)]
pub struct LoginCommand {
/// All args to be passed through to the plugin
#[clap(hide = true)]
args: Vec<String>,
}
#[derive(Debug, Args, PartialEq)]
#[clap(
about = "Commands for publishing applications to the Fermyon Cloud.",
allow_hyphen_values = true,
disable_help_flag = true
)]
pub struct CloudCommand {
/// All args to be passed through to the plugin
#[clap(hide = true)]
args: Vec<String>,
}
impl CloudCommand {
pub async fn run(self, app: clap::App<'_>) -> Result<()> {
let mut cmd = vec!["cloud".to_string()];
cmd.append(&mut self.args.clone());
execute_external_subcommand(cmd, app).await
}
}
impl DeployCommand {
pub async fn run(self, app: clap::App<'_>) -> Result<()> {
let mut cmd = vec!["cloud".to_string(), "deploy".to_string()];
cmd.append(&mut self.args.clone());
execute_external_subcommand(cmd, app).await
}
}
impl LoginCommand {
pub async fn run(self, app: clap::App<'_>) -> Result<()> {
let mut cmd = vec!["cloud".to_string(), "login".to_string()];
cmd.append(&mut self.args.clone());
execute_external_subcommand(cmd, app).await
}
}

View File

@ -1,884 +0,0 @@
use anyhow::ensure;
use anyhow::{anyhow, bail, Context, Result};
use bindle::Id;
use chrono::{DateTime, Utc};
use clap::Parser;
use cloud::client::{Client as CloudClient, ConnectionConfig};
use cloud_openapi::models::ChannelRevisionSelectionStrategy as CloudChannelRevisionSelectionStrategy;
use hippo::{Client, ConnectionInfo};
use hippo_openapi::models::ChannelRevisionSelectionStrategy;
use rand::Rng;
use semver::BuildMetadata;
use sha2::{Digest, Sha256};
use spin_common::{arg_parser::parse_kv, sloth};
use spin_http::routes::RoutePattern;
use spin_loader::bindle::BindleConnectionInfo;
use spin_loader::local::config::RawAppManifest;
use spin_loader::local::{assets, config, parent_dir};
use spin_manifest::ApplicationTrigger;
use spin_manifest::{HttpTriggerConfiguration, TriggerConfig};
use spin_trigger_http::AppInfo;
use tokio::fs;
use tracing::instrument;
use std::fs::File;
use std::io;
use std::io::{copy, Write};
use std::path::PathBuf;
use url::Url;
use uuid::Uuid;
use crate::{opts::*, parse_buildinfo};
use super::login::LoginCommand;
use super::login::LoginConnection;
const SPIN_DEPLOY_CHANNEL_NAME: &str = "spin-deploy";
const SPIN_DEFAULT_KV_STORE: &str = "default";
const BINDLE_REGISTRY_URL_PATH: &str = "api/registry";
/// Package and upload an application to the Fermyon Platform.
#[derive(Parser, Debug)]
#[clap(about = "Package and upload an application to the Fermyon Platform")]
pub struct DeployCommand {
/// The application to deploy. This may be a manifest (spin.toml) file, or a
/// directory containing a spin.toml file.
/// If omitted, it defaults to "spin.toml".
#[clap(
name = APP_MANIFEST_FILE_OPT,
short = 'f',
long = "from",
alias = "file",
default_value = DEFAULT_MANIFEST_FILE
)]
pub app_source: PathBuf,
/// Path to assemble the bindle before pushing (defaults to
/// a temporary directory)
#[clap(
name = STAGING_DIR_OPT,
long = "staging-dir",
short = 'd',
)]
pub staging_dir: Option<PathBuf>,
/// Disable attaching buildinfo
#[clap(
long = "no-buildinfo",
conflicts_with = BUILDINFO_OPT,
env = "SPIN_DEPLOY_NO_BUILDINFO"
)]
pub no_buildinfo: bool,
/// Build metadata to append to the bindle version
#[clap(
name = BUILDINFO_OPT,
long = "buildinfo",
parse(try_from_str = parse_buildinfo),
)]
pub buildinfo: Option<BuildMetadata>,
/// Deploy existing bindle if it already exists on bindle server
#[clap(short = 'e', long = "deploy-existing-bindle")]
pub redeploy: bool,
/// How long in seconds to wait for a deployed HTTP application to become
/// ready. The default is 60 seconds. Set it to 0 to skip waiting
/// for readiness.
#[clap(long = "readiness-timeout", default_value = "60")]
pub readiness_timeout_secs: u16,
/// Deploy to the Fermyon instance saved under the specified name.
/// If omitted, Spin deploys to the default unnamed instance.
#[clap(
name = "environment-name",
long = "environment-name",
env = DEPLOYMENT_ENV_NAME_ENV
)]
pub deployment_env_id: Option<String>,
/// Set a key/value pair (key=value) in the deployed application's
/// default store. Any existing value will be overwritten.
/// Can be used multiple times.
#[clap(long = "key-value", parse(try_from_str = parse_kv))]
pub key_values: Vec<(String, String)>,
}
impl DeployCommand {
pub async fn run(self) -> Result<()> {
let path = self.config_file_path()?;
// log in if config.json does not exist or cannot be read
let data = match fs::read_to_string(path.clone()).await {
Ok(d) => d,
Err(e) if e.kind() == io::ErrorKind::NotFound => {
match self.deployment_env_id {
Some(name) => {
// TODO: allow auto redirect to login preserving the name
eprintln!("You have no instance saved as '{}'", name);
eprintln!("Run `spin login --environment-name {}` to log in", name);
std::process::exit(1);
}
None => {
// log in, then read config
// TODO: propagate deployment id (or bail if nondefault?)
LoginCommand::parse_from(vec!["login"]).run().await?;
fs::read_to_string(path.clone()).await?
}
}
}
Err(e) => {
bail!("Could not log in: {}", e);
}
};
let mut login_connection: LoginConnection = serde_json::from_str(&data)?;
let expired = match has_expired(&login_connection) {
Ok(val) => val,
Err(err) => {
eprintln!("{}\n", err);
eprintln!("Run `spin login` to log in again");
std::process::exit(1);
}
};
if expired {
// if we have a refresh token available, let's try to refresh the token
match login_connection.refresh_token {
Some(refresh_token) => {
// Only Cloud has support for refresh tokens
let connection_config = ConnectionConfig {
url: login_connection.url.to_string(),
insecure: login_connection.danger_accept_invalid_certs,
token: login_connection.token.clone(),
};
let client = CloudClient::new(connection_config.clone());
match client
.refresh_token(login_connection.token, refresh_token)
.await
{
Ok(token_info) => {
login_connection.token = token_info.token;
login_connection.refresh_token = Some(token_info.refresh_token);
login_connection.expiration = Some(token_info.expiration);
// save new token info
let path = self.config_file_path()?;
std::fs::write(path, serde_json::to_string_pretty(&login_connection)?)?;
}
Err(e) => {
eprintln!("Failed to refresh token: {}", e);
match self.deployment_env_id {
Some(name) => {
eprintln!(
"Run `spin login --environment-name {}` to log in again",
name
);
}
None => {
eprintln!("Run `spin login` to log in again");
}
}
std::process::exit(1);
}
}
}
None => {
// session has expired and we have no way to refresh the token - log back in
match self.deployment_env_id {
Some(name) => {
// TODO: allow auto redirect to login preserving the name
eprintln!("Your login to this environment has expired");
eprintln!(
"Run `spin login --environment-name {}` to log in again",
name
);
std::process::exit(1);
}
None => {
LoginCommand::parse_from(vec!["login"]).run().await?;
let new_data = fs::read_to_string(path.clone()).await.context(
format!("Cannot find spin config at {}", path.to_string_lossy()),
)?;
login_connection = serde_json::from_str(&new_data)?;
}
}
}
}
}
let sloth_guard = sloth::warn_if_slothful(
2500,
format!("Checking status ({})\n", login_connection.url),
);
check_healthz(&login_connection.url).await?;
// Server has responded - we don't want to keep the sloth timer running.
drop(sloth_guard);
// TODO: we should have a smarter check in place here to determine the difference between Hippo and the Cloud APIs
if login_connection.bindle_url.is_some() {
self.deploy_hippo(login_connection).await
} else {
const DEVELOPER_CLOUD_FAQ: &str = "https://developer.fermyon.com/cloud/faq";
self.deploy_cloud(login_connection)
.await
.map_err(|e| anyhow!("{:?}\n\nLearn more at {}", e, DEVELOPER_CLOUD_FAQ))
}
}
fn app(&self) -> anyhow::Result<PathBuf> {
crate::manifest::resolve_file_path(&self.app_source)
}
// TODO: unify with login
fn config_file_path(&self) -> Result<PathBuf> {
let root = dirs::config_dir()
.context("Cannot find configuration directory")?
.join("fermyon");
let file_stem = match &self.deployment_env_id {
None => "config",
Some(id) => id,
};
let file = format!("{}.json", file_stem);
let path = root.join(file);
Ok(path)
}
async fn deploy_hippo(self, login_connection: LoginConnection) -> Result<()> {
let cfg_any = spin_loader::local::raw_manifest_from_file(&self.app()?).await?;
let cfg = cfg_any.into_v1();
ensure!(!cfg.components.is_empty(), "No components in spin.toml!");
let buildinfo = if !self.no_buildinfo {
match &self.buildinfo {
Some(i) => Some(i.clone()),
None => self.compute_buildinfo(&cfg).await.map(Option::Some)?,
}
} else {
None
};
let bindle_connection_info = BindleConnectionInfo::new(
login_connection.bindle_url.unwrap(),
login_connection.danger_accept_invalid_certs,
login_connection.bindle_username,
login_connection.bindle_password,
);
let bindle_id = self
.create_and_push_bindle(buildinfo, bindle_connection_info)
.await?;
let hippo_client = Client::new(ConnectionInfo {
url: login_connection.url.to_string(),
danger_accept_invalid_certs: login_connection.danger_accept_invalid_certs,
api_key: Some(login_connection.token),
});
let name = bindle_id.name().to_string();
// Create or update app
// TODO: this process involves many calls to Hippo. Should be able to update the channel
// via only `add_revision` if bindle naming schema is updated so bindles can be deterministically ordered by Hippo.
let channel_id = match self.get_app_id_hippo(&hippo_client, name.clone()).await {
Ok(app_id) => {
Client::add_revision(
&hippo_client,
name.clone(),
bindle_id.version_string().clone(),
)
.await?;
let existing_channel_id = self
.get_channel_id_hippo(
&hippo_client,
SPIN_DEPLOY_CHANNEL_NAME.to_string(),
app_id,
)
.await?;
let active_revision_id = self
.get_revision_id_hippo(
&hippo_client,
bindle_id.version_string().clone(),
app_id,
)
.await?;
Client::patch_channel(
&hippo_client,
existing_channel_id,
None,
None,
Some(ChannelRevisionSelectionStrategy::UseSpecifiedRevision),
None,
Some(active_revision_id),
None,
None,
)
.await
.context("Problem patching a channel in Hippo")?;
existing_channel_id
}
Err(_) => {
let range_rule = Some(bindle_id.version_string());
let app_id = Client::add_app(&hippo_client, name.clone(), name.clone())
.await
.context("Unable to create Hippo app")?;
Client::add_channel(
&hippo_client,
app_id,
String::from(SPIN_DEPLOY_CHANNEL_NAME),
None,
ChannelRevisionSelectionStrategy::UseRangeRule,
range_rule,
None,
None,
)
.await
.context("Problem creating a channel in Hippo")?
}
};
println!(
"Deployed {} version {}",
name.clone(),
bindle_id.version_string()
);
let channel = Client::get_channel_by_id(&hippo_client, &channel_id.to_string())
.await
.context("Problem getting channel by id")?;
let app_base_url = build_app_base_url(&channel.domain, &login_connection.url)?;
if let Ok(http_config) = HttpTriggerConfiguration::try_from(cfg.info.trigger.clone()) {
wait_for_ready(
&app_base_url,
&bindle_id.version_string(),
self.readiness_timeout_secs,
Destination::Platform,
)
.await;
print_available_routes(&app_base_url, &http_config.base, &cfg);
} else {
println!("Application is running at {}", channel.domain);
}
Ok(())
}
async fn deploy_cloud(self, login_connection: LoginConnection) -> Result<()> {
let connection_config = ConnectionConfig {
url: login_connection.url.to_string(),
insecure: login_connection.danger_accept_invalid_certs,
token: login_connection.token.clone(),
};
let client = CloudClient::new(connection_config.clone());
let cfg_any = spin_loader::local::raw_manifest_from_file(&self.app()?).await?;
let cfg = cfg_any.into_v1();
validate_cloud_app(&cfg)?;
match cfg.info.trigger {
ApplicationTrigger::Http(_) => {}
ApplicationTrigger::Redis(_) => bail!("Redis triggers are not supported"),
ApplicationTrigger::External(_) => bail!("External triggers are not supported"),
}
let buildinfo = if !self.no_buildinfo {
match &self.buildinfo {
Some(i) => Some(i.clone()),
// FIXME(lann): As a workaround for buggy partial bindle uploads,
// force a new bindle version on every upload.
None => Some(random_buildinfo()),
}
} else {
None
};
let su = Url::parse(login_connection.url.as_str())?;
let bindle_connection_info = BindleConnectionInfo::from_token(
su.join(BINDLE_REGISTRY_URL_PATH)?.to_string(),
login_connection.danger_accept_invalid_certs,
login_connection.token,
);
let bindle_id = self
.create_and_push_bindle(buildinfo, bindle_connection_info)
.await?;
let name = bindle_id.name().to_string();
println!("Deploying...");
// Create or update app
// TODO: this process involves many calls to Hippo. Should be able to update the channel
// via only `add_revision` if bindle naming schema is updated so bindles can be deterministically ordered by Hippo.
let channel_id = match self.get_app_id_cloud(&client, name.clone()).await {
Ok(app_id) => {
CloudClient::add_revision(
&client,
name.clone(),
bindle_id.version_string().clone(),
)
.await?;
let existing_channel_id = self
.get_channel_id_cloud(&client, SPIN_DEPLOY_CHANNEL_NAME.to_string(), app_id)
.await?;
let active_revision_id = self
.get_revision_id_cloud(&client, bindle_id.version_string().clone(), app_id)
.await?;
CloudClient::patch_channel(
&client,
existing_channel_id,
None,
Some(CloudChannelRevisionSelectionStrategy::UseSpecifiedRevision),
None,
Some(active_revision_id),
None,
)
.await
.context("Problem patching a channel")?;
for kv in self.key_values {
CloudClient::add_key_value_pair(
&client,
app_id,
SPIN_DEFAULT_KV_STORE.to_string(),
kv.0,
kv.1,
)
.await
.context("Problem creating key/value")?;
}
existing_channel_id
}
Err(_) => {
let app_id = CloudClient::add_app(&client, &name, &name)
.await
.context("Unable to create app")?;
// When creating the new app, InitialRevisionImport command is triggered
// which automatically imports all revisions from bindle into db
// therefore we do not need to call add_revision api explicitly here
let active_revision_id = self
.get_revision_id_cloud(&client, bindle_id.version_string().clone(), app_id)
.await?;
let channel_id = CloudClient::add_channel(
&client,
app_id,
String::from(SPIN_DEPLOY_CHANNEL_NAME),
CloudChannelRevisionSelectionStrategy::UseSpecifiedRevision,
None,
Some(active_revision_id),
)
.await
.context("Problem creating a channel")?;
for kv in self.key_values {
CloudClient::add_key_value_pair(
&client,
app_id,
SPIN_DEFAULT_KV_STORE.to_string(),
kv.0,
kv.1,
)
.await
.context("Problem creating key/value")?;
}
channel_id
}
};
let channel = CloudClient::get_channel_by_id(&client, &channel_id.to_string())
.await
.context("Problem getting channel by id")?;
let app_base_url = build_app_base_url(&channel.domain, &login_connection.url)?;
if let Ok(http_config) = HttpTriggerConfiguration::try_from(cfg.info.trigger.clone()) {
wait_for_ready(
&app_base_url,
&bindle_id.version_string(),
self.readiness_timeout_secs,
Destination::Cloud(connection_config.url),
)
.await;
print_available_routes(&app_base_url, &http_config.base, &cfg);
} else {
println!("Application is running at {}", channel.domain);
}
Ok(())
}
async fn compute_buildinfo(&self, cfg: &RawAppManifest) -> Result<BuildMetadata> {
let app_file = self.app()?;
let mut sha256 = Sha256::new();
let app_folder = parent_dir(&app_file)?;
for x in cfg.components.iter() {
match &x.source {
config::RawModuleSource::FileReference(p) => {
let full_path = app_folder.join(p);
let mut r = File::open(&full_path)
.with_context(|| anyhow!("Cannot open file {}", &full_path.display()))?;
copy(&mut r, &mut sha256)?;
}
config::RawModuleSource::Url(us) => sha256.update(us.digest.as_bytes()),
}
if let Some(files) = &x.wasm.files {
let exclude_files = x.wasm.exclude_files.clone().unwrap_or_default();
let fm = assets::collect(files, &exclude_files, &app_folder)?;
for f in fm.iter() {
let mut r = File::open(&f.src)
.with_context(|| anyhow!("Cannot open file {}", &f.src.display()))?;
copy(&mut r, &mut sha256)?;
}
}
}
let mut r = File::open(&app_file)?;
copy(&mut r, &mut sha256)?;
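// The resulting build metadata is the letter `q` plus the first seven hex digits of
// the SHA-256 digest: `truncate(8)` below keeps eight characters including the `q`
// (for example `q1a2b3c4`).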
let mut final_digest = format!("q{:x}", sha256.finalize());
final_digest.truncate(8);
let buildinfo =
BuildMetadata::new(&final_digest).with_context(|| "Could not compute build info")?;
Ok(buildinfo)
}
async fn get_app_id_hippo(&self, hippo_client: &Client, name: String) -> Result<Uuid> {
let apps_vm = Client::list_apps(hippo_client).await?;
let app = apps_vm.items.iter().find(|&x| x.name == name.clone());
match app {
Some(a) => Ok(a.id),
None => bail!("No app with name: {}", name),
}
}
async fn get_app_id_cloud(&self, cloud_client: &CloudClient, name: String) -> Result<Uuid> {
let apps_vm = CloudClient::list_apps(cloud_client).await?;
let app = apps_vm.items.iter().find(|&x| x.name == name.clone());
match app {
Some(a) => Ok(a.id),
None => bail!("No app with name: {}", name),
}
}
async fn get_revision_id_hippo(
&self,
hippo_client: &Client,
bindle_version: String,
app_id: Uuid,
) -> Result<Uuid> {
let revisions = Client::list_revisions(hippo_client).await?;
let revision = revisions
.items
.iter()
.find(|&x| x.revision_number == bindle_version && x.app_id == app_id);
Ok(revision
.ok_or_else(|| {
anyhow!(
"No revision with version {} and app id {}",
bindle_version,
app_id
)
})?
.id)
}
async fn get_revision_id_cloud(
&self,
cloud_client: &CloudClient,
bindle_version: String,
app_id: Uuid,
) -> Result<Uuid> {
let mut revisions = cloud_client.list_revisions().await?;
loop {
if let Some(revision) = revisions
.items
.iter()
.find(|&x| x.revision_number == bindle_version && x.app_id == app_id)
{
return Ok(revision.id);
}
if revisions.is_last_page {
break;
}
revisions = cloud_client.list_revisions_next(&revisions).await?;
}
Err(anyhow!(
"No revision with version {} and app id {}",
bindle_version,
app_id
))
}
async fn get_channel_id_hippo(
&self,
hippo_client: &Client,
name: String,
app_id: Uuid,
) -> Result<Uuid> {
let channels_vm = Client::list_channels(hippo_client).await?;
let channel = channels_vm
.items
.iter()
.find(|&x| x.app_id == app_id && x.name == name.clone());
match channel {
Some(c) => Ok(c.id),
None => bail!("No channel with app_id {} and name {}", app_id, name),
}
}
async fn get_channel_id_cloud(
&self,
cloud_client: &CloudClient,
name: String,
app_id: Uuid,
) -> Result<Uuid> {
let mut channels_vm = cloud_client.list_channels().await?;
loop {
if let Some(channel) = channels_vm
.items
.iter()
.find(|&x| x.app_id == app_id && x.name == name.clone())
{
return Ok(channel.id);
}
if channels_vm.is_last_page {
break;
}
channels_vm = cloud_client.list_channels_next(&channels_vm).await?;
}
Err(anyhow!(
"No channel with app_id {} and name {}",
app_id,
name
))
}
async fn create_and_push_bindle(
&self,
buildinfo: Option<BuildMetadata>,
bindle_connection_info: BindleConnectionInfo,
) -> Result<Id> {
let temp_dir = tempfile::tempdir()?;
let dest_dir = match &self.staging_dir {
None => temp_dir.path(),
Some(path) => path.as_path(),
};
let bindle_id = spin_bindle::prepare_bindle(&self.app()?, buildinfo, dest_dir)
.await
.map_err(crate::wrap_prepare_bindle_error)?;
println!(
"Uploading {} version {}...",
bindle_id.name(),
bindle_id.version()
);
match spin_bindle::push_all(dest_dir, &bindle_id, bindle_connection_info.clone()).await {
Err(spin_bindle::PublishError::BindleAlreadyExists(err_msg)) => {
if self.redeploy {
Ok(bindle_id.clone())
} else {
Err(anyhow!(
"Failed to push bindle to server.\n{}\nTry using the --deploy-existing-bindle flag",
err_msg
))
}
}
Err(spin_bindle::PublishError::BindleClient(bindle::client::ClientError::Other(e)))
if e.to_string().contains("application exceeds") =>
{
Err(anyhow!(e.trim_start_matches("Unknown error: ").to_owned()))
}
Err(err) => Err(err).with_context(|| {
crate::push_all_failed_msg(dest_dir, bindle_connection_info.base_url())
}),
Ok(()) => Ok(bindle_id.clone()),
}
}
}
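// Illustrative sketch only: `get_revision_id_cloud` and `get_channel_id_cloud` above
// share a "scan each page until a match or the last page" shape. `PageOf` and
// `find_in_pages` are hypothetical stand-ins (the Cloud client uses its own page
// types), shown here just to make the pattern explicit.
#[allow(dead_code)]
struct PageOf<T> {
    items: Vec<T>,
    is_last_page: bool,
}

#[allow(dead_code)]
async fn find_in_pages<T, F, Fut>(
    mut page: PageOf<T>,
    fetch_next: F,
    matches: impl Fn(&T) -> bool,
) -> Option<T>
where
    T: Clone,
    F: Fn(PageOf<T>) -> Fut,
    Fut: std::future::Future<Output = PageOf<T>>,
{
    loop {
        // Return the first item on the current page that satisfies the predicate.
        for item in &page.items {
            if matches(item) {
                return Some(item.clone());
            }
        }
        // Stop once the server reports there are no further pages.
        if page.is_last_page {
            return None;
        }
        page = fetch_next(page).await;
    }
}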
fn validate_cloud_app(app: &RawAppManifest) -> Result<()> {
ensure!(!app.components.is_empty(), "No components in spin.toml!");
for component in &app.components {
if let Some(invalid_store) = component
.wasm
.key_value_stores
.iter()
.flatten()
.find(|store| *store != "default")
{
bail!("Invalid store {invalid_store:?} for component {:?}. Cloud currently supports only the 'default' store.", component.id);
}
}
Ok(())
}
fn random_buildinfo() -> BuildMetadata {
let random_bytes: [u8; 4] = rand::thread_rng().gen();
let random_hex: String = random_bytes.iter().map(|b| format!("{:x}", b)).collect();
BuildMetadata::new(&format!("r{random_hex}")).unwrap()
}
fn build_app_base_url(app_domain: &str, hippo_url: &Url) -> Result<Url> {
// HACK: We assume that the scheme (https vs http) of apps will match that of Hippo...
let scheme = hippo_url.scheme();
Url::parse(&format!("{scheme}://{app_domain}/")).with_context(|| {
format!("Could not construct app base URL for {app_domain:?} (Hippo URL: {hippo_url:?})",)
})
}
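// Illustrative example of the scheme-reuse behaviour above; the domain and Hippo URL
// below are placeholders, not real endpoints.
#[test]
fn app_base_url_reuses_hippo_scheme() {
    let hippo_url = Url::parse("https://cloud.example.com/").unwrap();
    let base = build_app_base_url("myapp.example.com", &hippo_url).unwrap();
    assert_eq!(base.to_string(), "https://myapp.example.com/");
}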
async fn check_healthz(base_url: &Url) -> Result<()> {
let healthz_url = base_url.join("healthz")?;
reqwest::get(healthz_url)
.await?
.error_for_status()
.with_context(|| format!("Server {} is unhealthy", base_url))?;
Ok(())
}
const READINESS_POLL_INTERVAL_SECS: u64 = 2;
enum Destination {
Cloud(String),
Platform,
}
async fn wait_for_ready(
app_base_url: &Url,
bindle_version: &str,
readiness_timeout_secs: u16,
destination: Destination,
) {
if readiness_timeout_secs == 0 {
return;
}
let app_info_url = app_base_url
.join(spin_http::WELL_KNOWN_PREFIX.trim_start_matches('/'))
.unwrap()
.join("info")
.unwrap()
.to_string();
let start = std::time::Instant::now();
let readiness_timeout = std::time::Duration::from_secs(u64::from(readiness_timeout_secs));
let poll_interval = tokio::time::Duration::from_secs(READINESS_POLL_INTERVAL_SECS);
print!("Waiting for application to become ready");
let _ = std::io::stdout().flush();
loop {
match is_ready(&app_info_url, bindle_version).await {
Err(err) => {
println!("... readiness check failed: {err:?}");
return;
}
Ok(true) => {
println!("... ready");
return;
}
Ok(false) => {}
}
print!(".");
let _ = std::io::stdout().flush();
if start.elapsed() >= readiness_timeout {
println!();
println!("Application deployed, but Spin could not establish readiness");
if let Destination::Cloud(url) = destination {
println!("Check the Fermyon Cloud dashboard to see the application status: {url}");
}
return;
}
tokio::time::sleep(poll_interval).await;
}
}
#[instrument(level = "debug")]
async fn is_ready(app_info_url: &str, expected_version: &str) -> Result<bool> {
// If the request fails, we assume the app isn't ready
let resp = match reqwest::get(app_info_url).await {
Ok(resp) => resp,
Err(err) => {
tracing::warn!("Readiness check failed: {err:?}");
return Ok(false);
}
};
// If the response status isn't success, the app isn't ready
if !resp.status().is_success() {
tracing::debug!("App not ready: {}", resp.status());
return Ok(false);
}
// If the app was previously deployed then it will have an outdated bindle
// version, in which case the app isn't ready
if let Ok(app_info) = resp.json::<AppInfo>().await {
let active_version = app_info.bindle_version;
if active_version.as_deref() != Some(expected_version) {
tracing::debug!("Active version {active_version:?} != expected {expected_version:?}");
return Ok(false);
}
}
Ok(true)
}
fn print_available_routes(
app_base_url: &Url,
base: &str,
cfg: &spin_loader::local::config::RawAppManifest,
) {
if cfg.components.is_empty() {
return;
}
// Strip any trailing slash from base URL
let app_base_url = app_base_url.to_string();
let route_prefix = app_base_url.strip_suffix('/').unwrap_or(&app_base_url);
println!("Available Routes:");
for component in &cfg.components {
if let TriggerConfig::Http(http_cfg) = &component.trigger {
let route = RoutePattern::from(base, &http_cfg.route);
println!(" {}: {}{}", component.id, route_prefix, route);
if let Some(description) = &component.description {
println!(" {}", description);
}
}
}
}
// Check if the token has expired.
// If the expiration is None, assume the token has not expired
fn has_expired(login_connection: &LoginConnection) -> Result<bool> {
match &login_connection.expiration {
Some(expiration) => match DateTime::parse_from_rfc3339(expiration) {
Ok(time) => Ok(Utc::now() > time),
Err(err) => Err(anyhow!(
"Failed to parse token expiration time '{}'. Error: {}",
expiration,
err
)),
},
None => Ok(false),
}
}
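// Illustrative example of the expiry check above; the connection fields are
// placeholders. A past RFC 3339 timestamp is reported as expired, while `None`
// is treated as still valid.
#[test]
fn past_expiration_is_detected() {
    let connection = LoginConnection {
        url: Url::parse("https://cloud.example.com/").unwrap(),
        bindle_url: None,
        bindle_username: None,
        bindle_password: None,
        danger_accept_invalid_certs: false,
        token: "placeholder-token".to_string(),
        refresh_token: None,
        expiration: Some("2000-01-01T00:00:00Z".to_string()),
    };
    assert!(has_expired(&connection).unwrap());
}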

View File

@ -1,7 +1,8 @@
use crate::build_info::*;
use crate::commands::plugins::{update, Install};
use crate::opts::PLUGIN_OVERRIDE_COMPATIBILITY_CHECK_FLAG;
use anyhow::{anyhow, Result};
use spin_plugins::{error::Error, manifest::warn_unsupported_version, PluginStore};
use spin_plugins::{error::Error as PluginError, manifest::warn_unsupported_version, PluginStore};
use std::{collections::HashMap, env, process};
use tokio::process::Command;
use tracing::log;
@ -48,11 +49,31 @@ pub async fn execute_external_subcommand(
process::exit(1);
}
}
Err(Error::NotFound(e)) => {
tracing::debug!("Tried to resolve {plugin_name} to plugin, got {e}");
terminal::error!("'{plugin_name}' is not a known Spin command. See spin --help.\n");
print_similar_commands(app, &plugin_name);
process::exit(2);
Err(PluginError::NotFound(e)) => {
if plugin_name == "cloud" {
println!("The `cloud` plugin is required. Installing now.");
let plugin_installer = Install {
name: Some("cloud".to_string()),
yes_to_all: true,
local_manifest_src: None,
remote_manifest_src: None,
override_compatibility_check: false,
version: None,
};
// Automatically update the plugins repository if the cloud plugin manifest does not exist
// TODO: remove this eventually, once it is unlikely that users still have an outdated repository
if let Err(e) = plugin_installer.run().await {
if let Some(PluginError::NotFound(_)) = e.downcast_ref::<PluginError>() {
update().await?;
}
plugin_installer.run().await?;
}
} else {
tracing::debug!("Tried to resolve {plugin_name} to plugin, got {e}");
terminal::error!("'{plugin_name}' is not a known Spin command. See spin --help.\n");
print_similar_commands(app, &plugin_name);
process::exit(2);
}
}
Err(e) => return Err(e.into()),
}

View File

@ -1,664 +0,0 @@
use std::io::{stdin, Write};
use std::path::PathBuf;
use std::time::Duration;
use anyhow::{bail, Context, Result};
use clap::Parser;
use cloud::client::{Client, ConnectionConfig};
use cloud_openapi::models::DeviceCodeItem;
use cloud_openapi::models::TokenInfo;
use hippo::Client as HippoClient;
use hippo::ConnectionInfo;
use serde::Deserialize;
use serde::Serialize;
use serde_json::json;
use tokio::fs;
use tracing::log;
use url::Url;
use uuid::Uuid;
use crate::opts::{
BINDLE_PASSWORD, BINDLE_SERVER_URL_OPT, BINDLE_URL_ENV, BINDLE_USERNAME,
DEPLOYMENT_ENV_NAME_ENV, HIPPO_PASSWORD, HIPPO_SERVER_URL_OPT, HIPPO_URL_ENV, HIPPO_USERNAME,
INSECURE_OPT, SPIN_AUTH_TOKEN, TOKEN,
};
// this is the client ID registered in the Cloud's backend
const SPIN_CLIENT_ID: &str = "583e63e9-461f-4fbe-a246-23e0fb1cad10";
const DEFAULT_CLOUD_URL: &str = "https://cloud.fermyon.com/";
/// Log into the Fermyon Platform.
#[derive(Parser, Debug)]
#[clap(about = "Log into the Fermyon Platform")]
pub struct LoginCommand {
/// URL of bindle server
#[clap(
name = BINDLE_SERVER_URL_OPT,
long = "bindle-server",
env = BINDLE_URL_ENV,
)]
pub bindle_server_url: Option<String>,
/// Basic http auth username for the bindle server
#[clap(
name = BINDLE_USERNAME,
long = "bindle-username",
env = BINDLE_USERNAME,
requires = BINDLE_PASSWORD
)]
pub bindle_username: Option<String>,
/// Basic http auth password for the bindle server
#[clap(
name = BINDLE_PASSWORD,
long = "bindle-password",
env = BINDLE_PASSWORD,
requires = BINDLE_USERNAME
)]
pub bindle_password: Option<String>,
/// Ignore server certificate errors from bindle and hippo
#[clap(
name = INSECURE_OPT,
short = 'k',
long = "insecure",
takes_value = false,
)]
pub insecure: bool,
/// URL of hippo server
#[clap(
name = HIPPO_SERVER_URL_OPT,
long = "url",
env = HIPPO_URL_ENV,
default_value = DEFAULT_CLOUD_URL,
value_parser = parse_url,
)]
pub hippo_server_url: url::Url,
/// Hippo username
#[clap(
name = HIPPO_USERNAME,
long = "username",
env = HIPPO_USERNAME,
requires = HIPPO_PASSWORD,
)]
pub hippo_username: Option<String>,
/// Hippo password
#[clap(
name = HIPPO_PASSWORD,
long = "password",
env = HIPPO_PASSWORD,
requires = HIPPO_USERNAME,
)]
pub hippo_password: Option<String>,
/// Auth Token
#[clap(
name = TOKEN,
long = "token",
env = SPIN_AUTH_TOKEN,
)]
pub token: Option<String>,
/// Display login status
#[clap(
name = "status",
long = "status",
takes_value = false,
conflicts_with = "list",
conflicts_with = "get-device-code",
conflicts_with = "check-device-code"
)]
pub status: bool,
// fetch a device code
#[clap(
name = "get-device-code",
long = "get-device-code",
takes_value = false,
hide = true,
conflicts_with = "status",
conflicts_with = "check-device-code"
)]
pub get_device_code: bool,
// check a device code
#[clap(
name = "check-device-code",
long = "check-device-code",
hide = true,
conflicts_with = "status",
conflicts_with = "get-device-code"
)]
pub check_device_code: Option<String>,
// authentication method used for logging in (username|github)
#[clap(
name = "auth-method",
long = "auth-method",
env = "AUTH_METHOD",
arg_enum
)]
pub method: Option<AuthMethod>,
/// Save the login details under the specified name instead of making them
/// the default. Use named environments with `spin deploy --environment-name <name>`.
#[clap(
name = "environment-name",
long = "environment-name",
env = DEPLOYMENT_ENV_NAME_ENV
)]
pub deployment_env_id: Option<String>,
/// List saved logins.
#[clap(
name = "list",
long = "list",
takes_value = false,
conflicts_with = "environment-name",
conflicts_with = "status",
conflicts_with = "get-device-code",
conflicts_with = "check-device-code"
)]
pub list: bool,
}
fn parse_url(url: &str) -> Result<url::Url> {
let mut url = Url::parse(url)
.map_err(|error| {
anyhow::format_err!(
"URL should be fully qualified in the format \"https://my-hippo-instance.com\". Error: {}", error
)
})?;
// Ensure path ends with '/' so join works properly
if !url.path().ends_with('/') {
url.set_path(&(url.path().to_string() + "/"));
}
Ok(url)
}
impl LoginCommand {
pub async fn run(&self) -> Result<()> {
match (
self.list,
self.status,
self.get_device_code,
&self.check_device_code,
) {
(true, false, false, None) => self.run_list().await,
(false, true, false, None) => self.run_status().await,
(false, false, true, None) => self.run_get_device_code().await,
(false, false, false, Some(device_code)) => {
self.run_check_device_code(device_code).await
}
(false, false, false, None) => self.run_interactive_login().await,
_ => Err(anyhow::anyhow!("Invalid combination of options")), // Should never happen
}
}
async fn run_list(&self) -> Result<()> {
let root = config_root_dir()?;
ensure(&root)?;
let json_file_stems = std::fs::read_dir(&root)
.with_context(|| format!("Failed to read config directory {}", root.display()))?
.filter_map(environment_name_from_path)
.collect::<Vec<_>>();
for s in json_file_stems {
println!("{}", s);
}
Ok(())
}
async fn run_status(&self) -> Result<()> {
let path = self.config_file_path()?;
let data = fs::read_to_string(&path)
.await
.context("Cannot display login information")?;
println!("{}", data);
Ok(())
}
async fn run_get_device_code(&self) -> Result<()> {
let connection_config = self.anon_connection_config();
let device_code_info = create_device_code(&Client::new(connection_config)).await?;
println!("{}", serde_json::to_string_pretty(&device_code_info)?);
Ok(())
}
async fn run_check_device_code(&self, device_code: &str) -> Result<()> {
let connection_config = self.anon_connection_config();
let client = Client::new(connection_config);
let token_readiness = match client.login(device_code.to_owned()).await {
Ok(token_info) => TokenReadiness::Ready(token_info),
Err(_) => TokenReadiness::Unready,
};
match token_readiness {
TokenReadiness::Ready(token_info) => {
println!("{}", serde_json::to_string_pretty(&token_info)?);
let login_connection = self.login_connection_for_token_info(token_info);
self.save_login_info(&login_connection)?;
}
TokenReadiness::Unready => {
let waiting = json!({ "status": "waiting" });
println!("{}", serde_json::to_string_pretty(&waiting)?);
}
}
Ok(())
}
async fn run_interactive_login(&self) -> Result<()> {
let login_connection = match self.auth_method() {
AuthMethod::Github => self.run_interactive_gh_login().await?,
AuthMethod::UsernameAndPassword => self.run_interactive_basic_login().await?,
AuthMethod::Token => self.login_using_token().await?,
};
self.save_login_info(&login_connection)
}
async fn login_using_token(&self) -> Result<LoginConnection> {
// check that the user passed in a token
let token = match self.token.clone() {
Some(t) => t,
None => return Err(anyhow::anyhow!(format!("No personal access token was provided. Please provide one using either ${} or --{}.", SPIN_AUTH_TOKEN, TOKEN.to_lowercase()))),
};
// Validate the token by calling the list_apps API (until a dedicated user info API is available)
Client::new(ConnectionConfig {
url: self.hippo_server_url.to_string(),
insecure: self.insecure,
token: token.clone(),
})
.list_apps()
.await
.context("Login using the provided personal access token failed. Run `spin login` or create a new token using the Fermyon Cloud user interface.")?;
Ok(self.login_connection_for_token(token))
}
async fn run_interactive_gh_login(&self) -> Result<LoginConnection> {
// log in to the cloud API
let connection_config = self.anon_connection_config();
let token_info = github_token(connection_config).await?;
Ok(self.login_connection_for_token_info(token_info))
}
async fn run_interactive_basic_login(&self) -> Result<LoginConnection> {
let username = prompt_if_not_provided(&self.hippo_username, "Hippo username")?;
let password = match &self.hippo_password {
Some(password) => password.to_owned(),
None => {
print!("Hippo password: ");
std::io::stdout().flush()?;
rpassword::read_password()
.expect("unable to read user input")
.trim()
.to_owned()
}
};
let bindle_url = prompt_if_not_provided(&self.bindle_server_url, "Bindle URL")?;
// If Bindle URL was provided and Bindle username and password were not, assume Bindle
// is unauthenticated. If Bindle URL was prompted for, or Bindle username or password
// is provided, ask the user.
let mut bindle_username = self.bindle_username.clone();
let mut bindle_password = self.bindle_password.clone();
let unauthenticated_bindle_server_provided = self.bindle_server_url.is_some()
&& self.bindle_username.is_none()
&& self.bindle_password.is_none();
if !unauthenticated_bindle_server_provided {
let bindle_username_text = prompt_if_not_provided(
&self.bindle_username,
"Bindle username (blank for unauthenticated)",
)?;
bindle_username = if bindle_username_text.is_empty() {
None
} else {
Some(bindle_username_text)
};
bindle_password = match bindle_username {
None => None,
Some(_) => Some(prompt_if_not_provided(
&self.bindle_password,
"Bindle password",
)?),
};
}
// log in with username/password
let token = match HippoClient::login(
&HippoClient::new(ConnectionInfo {
url: self.hippo_server_url.to_string(),
danger_accept_invalid_certs: self.insecure,
api_key: None,
}),
username,
password,
)
.await
{
Ok(token_info) => token_info,
Err(err) => bail!(format_login_error(&err)?),
};
Ok(LoginConnection {
url: self.hippo_server_url.clone(),
danger_accept_invalid_certs: self.insecure,
token: token.token.unwrap_or_default(),
refresh_token: None,
expiration: token.expiration,
bindle_url: Some(bindle_url),
bindle_username,
bindle_password,
})
}
fn login_connection_for_token(&self, token: String) -> LoginConnection {
LoginConnection {
url: self.hippo_server_url.clone(),
danger_accept_invalid_certs: self.insecure,
token,
refresh_token: None,
expiration: None,
bindle_url: None,
bindle_username: None,
bindle_password: None,
}
}
fn login_connection_for_token_info(&self, token_info: TokenInfo) -> LoginConnection {
LoginConnection {
url: self.hippo_server_url.clone(),
danger_accept_invalid_certs: self.insecure,
token: token_info.token,
refresh_token: Some(token_info.refresh_token),
expiration: Some(token_info.expiration),
bindle_url: None,
bindle_username: None,
bindle_password: None,
}
}
fn config_file_path(&self) -> Result<PathBuf> {
let root = config_root_dir()?;
ensure(&root)?;
let file_stem = match &self.deployment_env_id {
None => "config",
Some(id) => id,
};
let file = format!("{}.json", file_stem);
let path = root.join(file);
Ok(path)
}
fn anon_connection_config(&self) -> ConnectionConfig {
ConnectionConfig {
url: self.hippo_server_url.to_string(),
insecure: self.insecure,
token: Default::default(),
}
}
fn auth_method(&self) -> AuthMethod {
if let Some(method) = &self.method {
method.clone()
} else if self.get_device_code || self.check_device_code.is_some() {
AuthMethod::Github
} else if self.hippo_username.is_some() || self.hippo_password.is_some() {
AuthMethod::UsernameAndPassword
} else if self.hippo_server_url.as_str() != DEFAULT_CLOUD_URL {
// prompt the user for the authentication method
// TODO: implement a server "feature" check that tells us what authentication methods it supports
prompt_for_auth_method()
} else if self.token.is_some() {
AuthMethod::Token
} else {
AuthMethod::Github
}
}
fn save_login_info(&self, login_connection: &LoginConnection) -> Result<(), anyhow::Error> {
let path = self.config_file_path()?;
std::fs::write(path, serde_json::to_string_pretty(login_connection)?)?;
Ok(())
}
}
fn config_root_dir() -> Result<PathBuf, anyhow::Error> {
let root = dirs::config_dir()
.context("Cannot find configuration directory")?
.join("fermyon");
Ok(root)
}
fn prompt_if_not_provided(provided: &Option<String>, prompt_text: &str) -> Result<String> {
match provided {
Some(value) => Ok(value.to_owned()),
None => {
print!("{}: ", prompt_text);
std::io::stdout().flush()?;
let mut input = String::new();
stdin()
.read_line(&mut input)
.expect("unable to read user input");
Ok(input.trim().to_owned())
}
}
}
async fn github_token(
connection_config: ConnectionConfig,
) -> Result<cloud_openapi::models::TokenInfo> {
let client = Client::new(connection_config);
// Generate a device code and a user code to activate it with
let device_code = create_device_code(&client).await?;
println!(
"\nCopy your one-time code:\n\n{}\n",
device_code.user_code.clone(),
);
println!(
"...and open the authorization page in your browser:\n\n{}\n",
device_code.verification_url.clone(),
);
// The OAuth library should theoretically handle waiting for the device to be authorized, but
// testing revealed that it doesn't work. So we manually poll every 10 seconds for fifteen minutes.
const POLL_INTERVAL_SECS: u64 = 10;
let mut seconds_elapsed = 0;
let timeout_seconds = 15 * 60;
// Loop while waiting for the device code to be authorized by the user
loop {
if seconds_elapsed > timeout_seconds {
bail!("Timed out waiting to authorize the device. Please execute `spin login` again and authorize the device with GitHub.");
}
match client.login(device_code.device_code.clone()).await {
Ok(response) => {
println!("Device authorized!");
return Ok(response);
}
Err(_) => {
println!("Waiting for device authorization...");
tokio::time::sleep(Duration::from_secs(POLL_INTERVAL_SECS)).await;
seconds_elapsed += POLL_INTERVAL_SECS;
}
};
}
}
async fn create_device_code(client: &Client) -> Result<DeviceCodeItem> {
client
.create_device_code(Uuid::parse_str(SPIN_CLIENT_ID)?)
.await
}
#[derive(Clone, Serialize, Deserialize)]
pub struct LoginConnection {
pub url: Url,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub bindle_url: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub bindle_username: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub bindle_password: Option<String>,
pub danger_accept_invalid_certs: bool,
pub token: String,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub refresh_token: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub expiration: Option<String>,
}
#[derive(Deserialize, Serialize)]
struct LoginHippoError {
title: String,
detail: String,
}
fn format_login_error(err: &anyhow::Error) -> anyhow::Result<String> {
let detail = match serde_json::from_str::<LoginHippoError>(err.to_string().as_str()) {
Ok(e) => {
if e.detail.ends_with(": ") {
e.detail.replace(": ", ".")
} else {
e.detail
}
}
Err(_) => err.to_string(),
};
Ok(format!("Problem logging into Hippo: {}", detail))
}
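// Illustrative example of the normalization above; the error payload is a made-up JSON
// body. A `detail` ending in ": " has that suffix folded into a period; non-JSON errors
// fall back to their plain string form.
#[test]
fn login_error_detail_is_normalized() {
    let err = anyhow::anyhow!(
        "{}",
        r#"{"title":"Unauthorized","detail":"Invalid credentials: "}"#
    );
    let msg = format_login_error(&err).unwrap();
    assert_eq!(msg, "Problem logging into Hippo: Invalid credentials.");
}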
/// Ensure the root directory exists, or else create it.
fn ensure(root: &PathBuf) -> Result<()> {
log::trace!("Ensuring root directory {:?}", root);
if !root.exists() {
log::trace!("Creating configuration root directory `{}`", root.display());
std::fs::create_dir_all(root).with_context(|| {
format!(
"Failed to create configuration root directory `{}`",
root.display()
)
})?;
} else if !root.is_dir() {
bail!(
"Configuration root `{}` already exists and is not a directory",
root.display()
);
} else {
log::trace!(
"Using existing configuration root directory `{}`",
root.display()
);
}
Ok(())
}
/// The method by which to authenticate the login.
#[derive(clap::ArgEnum, Clone, Debug, Eq, PartialEq)]
pub enum AuthMethod {
#[clap(name = "github")]
Github,
#[clap(name = "username")]
UsernameAndPassword,
#[clap(name = "token")]
Token,
}
fn prompt_for_auth_method() -> AuthMethod {
loop {
// prompt the user for the authentication method
print!("What authentication method does this server support?\n\n1. Sign in with GitHub\n2. Sign in with a username and password\n\nEnter a number: ");
std::io::stdout().flush().unwrap();
let mut input = String::new();
stdin()
.read_line(&mut input)
.expect("unable to read user input");
match input.trim() {
"1" => {
return AuthMethod::Github;
}
"2" => {
return AuthMethod::UsernameAndPassword;
}
_ => {
println!("invalid input. Please enter either 1 or 2.");
}
}
}
}
enum TokenReadiness {
Ready(TokenInfo),
Unready,
}
fn environment_name_from_path(dir_entry: std::io::Result<std::fs::DirEntry>) -> Option<String> {
let json_ext = std::ffi::OsString::from("json");
let default_name = "(default)";
match dir_entry {
Err(_) => None,
Ok(de) => {
if is_file_with_extension(&de, &json_ext) {
de.path().file_stem().map(|stem| {
let s = stem.to_string_lossy().to_string();
if s == "config" {
default_name.to_owned()
} else {
s
}
})
} else {
None
}
}
}
}
fn is_file_with_extension(de: &std::fs::DirEntry, extension: &std::ffi::OsString) -> bool {
match de.file_type() {
Err(_) => false,
Ok(t) => {
if t.is_file() {
de.path().extension() == Some(extension)
} else {
false
}
}
}
}
#[test]
fn parse_url_ensures_trailing_slash() {
let url = parse_url("https://localhost:12345/foo/bar").unwrap();
assert_eq!(url.to_string(), "https://localhost:12345/foo/bar/");
}
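// Illustrative companion example: input that is not a fully qualified URL is rejected
// with the formatting error above.
#[test]
fn parse_url_rejects_unqualified_input() {
    assert!(parse_url("not-a-url").is_err());
}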

View File

@ -104,11 +104,11 @@ pub struct Install {
}
impl Install {
pub async fn run(self) -> Result<()> {
let manifest_location = match (self.local_manifest_src, self.remote_manifest_src, self.name) {
(Some(path), None, None) => ManifestLocation::Local(path),
(None, Some(url), None) => ManifestLocation::Remote(url),
(None, None, Some(name)) => ManifestLocation::PluginsRepository(PluginLookup::new(&name, self.version)),
pub async fn run(&self) -> Result<()> {
let manifest_location = match (&self.local_manifest_src, &self.remote_manifest_src, &self.name) {
(Some(path), None, None) => ManifestLocation::Local(path.to_path_buf()),
(None, Some(url), None) => ManifestLocation::Remote(url.clone()),
(None, None, Some(name)) => ManifestLocation::PluginsRepository(PluginLookup::new(name, self.version.clone())),
_ => return Err(anyhow::anyhow!("For plugin lookup, must provide exactly one of: plugin name, url to manifest, local path to manifest")),
};
let manager = PluginManager::try_default()?;
@ -409,7 +409,7 @@ impl PluginDescriptor {
}
/// Updates the locally cached spin-plugins repository, fetching the latest plugins.
async fn update() -> Result<()> {
pub(crate) async fn update() -> Result<()> {
let manager = PluginManager::try_default()?;
let plugins_dir = manager.store().get_plugins_directory();
let url = plugins_repo_url()?;

View File

@ -5,30 +5,4 @@ pub(crate) mod opts;
mod watch_filter;
mod watch_state;
use anyhow::{anyhow, Result};
use semver::BuildMetadata;
use spin_bindle::PublishError;
use std::path::Path;
pub use crate::opts::HELP_ARGS_ONLY_TRIGGER_TYPE;
pub(crate) fn push_all_failed_msg(path: &Path, server_url: &str) -> String {
format!(
"Failed to push bindle from '{}' to the server at '{}'",
path.display(),
server_url
)
}
pub(crate) fn wrap_prepare_bindle_error(err: PublishError) -> anyhow::Error {
match err {
PublishError::MissingBuildArtifact(_) => {
anyhow!("{}\n\nPlease try to run `spin build` first", err)
}
e => anyhow!(e),
}
}
pub(crate) fn parse_buildinfo(buildinfo: &str) -> Result<BuildMetadata> {
Ok(BuildMetadata::new(buildinfo)?)
}

View File

@ -1,17 +1,6 @@
pub const DEFAULT_MANIFEST_FILE: &str = "spin.toml";
pub const APP_MANIFEST_FILE_OPT: &str = "APP_MANIFEST_FILE";
pub const BINDLE_SERVER_URL_OPT: &str = "BINDLE_SERVER_URL";
pub const BINDLE_URL_ENV: &str = "BINDLE_URL";
pub const BINDLE_USERNAME: &str = "BINDLE_USERNAME";
pub const BINDLE_PASSWORD: &str = "BINDLE_PASSWORD";
pub const BUILDINFO_OPT: &str = "BUILDINFO";
pub const INSECURE_OPT: &str = "INSECURE";
pub const STAGING_DIR_OPT: &str = "STAGING_DIR";
pub const HIPPO_SERVER_URL_OPT: &str = "HIPPO_SERVER_URL";
pub const HIPPO_URL_ENV: &str = "HIPPO_URL";
pub const HIPPO_USERNAME: &str = "HIPPO_USERNAME";
pub const HIPPO_PASSWORD: &str = "HIPPO_PASSWORD";
pub const DEPLOYMENT_ENV_NAME_ENV: &str = "FERMYON_DEPLOYMENT_ENVIRONMENT";
pub const BUILD_UP_OPT: &str = "UP";
pub const PLUGIN_NAME_OPT: &str = "PLUGIN_NAME";
pub const PLUGIN_REMOTE_PLUGIN_MANIFEST_OPT: &str = "REMOTE_PLUGIN_MANIFEST";
@ -20,8 +9,6 @@ pub const PLUGIN_ALL_OPT: &str = "ALL";
pub const PLUGIN_OVERRIDE_COMPATIBILITY_CHECK_FLAG: &str = "override-compatibility-check";
pub const HELP_ARGS_ONLY_TRIGGER_TYPE: &str = "provide-help-args-no-app";
pub const FROM_REGISTRY_OPT: &str = "REGISTRY_REFERENCE";
pub const TOKEN: &str = "TOKEN";
pub const SPIN_AUTH_TOKEN: &str = "SPIN_AUTH_TOKEN";
pub const WATCH_CLEAR_OPT: &str = "CLEAR";
pub const WATCH_DEBOUNCE_OPT: &str = "DEBOUNCE";
pub const WATCH_SKIP_BUILD_OPT: &str = "SKIP_BUILD";

View File

@ -23,301 +23,6 @@ mod integration_tests {
const SPIN_BINARY: &str = "./target/debug/spin";
// This module consists of all integration tests that require dependencies such as bindle-server, nomad, and Hippo.Web to be installed.
#[cfg(feature = "fermyon-platform")]
mod fermyon_platform {
use super::*;
use std::path::PathBuf;
use tempfile::TempDir;
use which::which;
const RUST_HTTP_HEADERS_ENV_ROUTES_TEST: &str = "tests/http/headers-env-routes-test";
const BINDLE_SERVER_BINARY: &str = "bindle-server";
const NOMAD_BINARY: &str = "nomad";
const HIPPO_BINARY: &str = "Hippo.Web";
const BINDLE_SERVER_PATH_ENV: &str = "SPIN_TEST_BINDLE_SERVER_PATH";
const BINDLE_SERVER_BASIC_AUTH_HTPASSWD_FILE: &str = "tests/http/htpasswd";
const HIPPO_BASIC_AUTH_USER: &str = "hippo-user";
const HIPPO_BASIC_AUTH_PASSWORD: &str = "topsecret";
// This assumes all tests have been previously compiled by the top-level build script.
#[tokio::test]
async fn test_dependencies() -> Result<()> {
which(get_process(BINDLE_SERVER_BINARY))
.with_context(|| format!("Can't find {}", get_process(BINDLE_SERVER_BINARY)))?;
which(get_process(NOMAD_BINARY))
.with_context(|| format!("Can't find {}", get_process(NOMAD_BINARY)))?;
which(get_process(HIPPO_BINARY))
.with_context(|| format!("Can't find {}", get_process(HIPPO_BINARY)))?;
Ok(())
}
#[tokio::test]
async fn test_spin_deploy() -> Result<()> {
// start the Bindle registry.
let config = BindleTestControllerConfig {
basic_auth_enabled: false,
};
let _nomad = NomadTestController::new().await?;
let bindle = BindleTestController::new(config).await?;
let hippo = HippoTestController::new(&bindle.url).await?;
// push the application to the registry using the Spin CLI.
run(
vec![
SPIN_BINARY,
"login",
"--bindle-server",
&bindle.url,
"--url",
&hippo.url,
"--username",
HIPPO_BASIC_AUTH_USER,
"--password",
HIPPO_BASIC_AUTH_PASSWORD,
],
None,
None,
)?;
run(
vec![
SPIN_BINARY,
"deploy",
"--file",
&format!(
"{}/{}",
RUST_HTTP_HEADERS_ENV_ROUTES_TEST, DEFAULT_MANIFEST_LOCATION
),
],
None,
None,
)?;
let apps_vm = hippo.client.list_apps().await?;
assert_eq!(apps_vm.items.len(), 1, "hippo apps: {apps_vm:?}");
Ok(())
}
/// Controller for running a Bindle server.
/// This assumes `bindle-server` is present in the path.
pub struct BindleTestController {
pub url: String,
pub server_cache: TempDir,
server_handle: Child,
}
/// Config for the BindleTestController
pub struct BindleTestControllerConfig {
pub basic_auth_enabled: bool,
}
impl BindleTestController {
pub async fn new(config: BindleTestControllerConfig) -> Result<BindleTestController> {
let server_cache = tempfile::tempdir()?;
let address = format!("127.0.0.1:{}", get_random_port()?);
let url = format!("http://{}/v1/", address);
let bindle_server_binary = std::env::var(BINDLE_SERVER_PATH_ENV)
.unwrap_or_else(|_| BINDLE_SERVER_BINARY.to_owned());
let auth_args = match config.basic_auth_enabled {
true => vec!["--htpasswd-file", BINDLE_SERVER_BASIC_AUTH_HTPASSWD_FILE],
false => vec!["--unauthenticated"],
};
let server_handle_result = Command::new(&bindle_server_binary)
.args(
[
&[
"-d",
server_cache.path().to_string_lossy().to_string().as_str(),
"-i",
address.as_str(),
],
auth_args.as_slice(),
]
.concat(),
)
.spawn();
let mut server_handle = match server_handle_result {
Ok(h) => Ok(h),
Err(e) => {
let is_path_explicit = std::env::var(BINDLE_SERVER_PATH_ENV).is_ok();
let context = match e.kind() {
std::io::ErrorKind::NotFound => {
if is_path_explicit {
format!(
"executing {}: is the path/filename correct?",
bindle_server_binary
)
} else {
format!(
"executing {}: is binary on PATH?",
bindle_server_binary
)
}
}
_ => format!("executing {}", bindle_server_binary),
};
Err(e).context(context)
}
}?;
wait_tcp(&address, &mut server_handle, BINDLE_SERVER_BINARY).await?;
Ok(Self {
url,
server_handle,
server_cache,
})
}
}
impl Drop for BindleTestController {
fn drop(&mut self) {
let _ = self.server_handle.kill();
}
}
/// Controller for running Nomad.
pub struct NomadTestController {
pub url: String,
nomad_handle: Child,
}
impl NomadTestController {
pub async fn new() -> Result<NomadTestController> {
let url = "127.0.0.1:4646".to_string();
let mut nomad_handle = Command::new(get_process(NOMAD_BINARY))
.args(["agent", "-dev"])
.spawn()
.with_context(|| "executing nomad")?;
wait_tcp(&url, &mut nomad_handle, NOMAD_BINARY).await?;
Ok(Self { url, nomad_handle })
}
}
impl Drop for NomadTestController {
fn drop(&mut self) {
let _ = self.nomad_handle.kill();
}
}
/// Controller for running Hippo.
pub struct HippoTestController {
pub url: String,
pub client: hippo::Client,
hippo_handle: Child,
}
impl HippoTestController {
pub async fn new(bindle_url: &str) -> Result<HippoTestController> {
let url = format!("http://127.0.0.1:{}", get_random_port()?);
let mut hippo_handle = Command::new(get_process(HIPPO_BINARY))
.env("ASPNETCORE_URLS", &url)
.env("Nomad__Driver", "raw_exec")
.env("Nomad__Datacenters__0", "dc1")
.env("Database__Driver", "inmemory")
.env("ConnectionStrings__Bindle", format!("Address={bindle_url}"))
.env("Jwt__Key", "ceci n'est pas une jeton")
.env("Jwt__Issuer", "localhost")
.env("Jwt__Audience", "localhost")
.spawn()
.with_context(|| "executing hippo")?;
wait_hippo(&url, &mut hippo_handle, HIPPO_BINARY).await?;
let client = hippo::Client::new(hippo::ConnectionInfo {
url: url.clone(),
danger_accept_invalid_certs: true,
api_key: None,
});
client
.register(
HIPPO_BASIC_AUTH_USER.into(),
HIPPO_BASIC_AUTH_PASSWORD.into(),
)
.await?;
let token_info = client
.login(
HIPPO_BASIC_AUTH_USER.into(),
HIPPO_BASIC_AUTH_PASSWORD.into(),
)
.await?;
let client = hippo::Client::new(hippo::ConnectionInfo {
url: url.clone(),
danger_accept_invalid_certs: true,
api_key: token_info.token,
});
Ok(Self {
url,
client,
hippo_handle,
})
}
}
impl Drop for HippoTestController {
fn drop(&mut self) {
let _ = self.hippo_handle.kill();
}
}
async fn wait_hippo(url: &str, process: &mut Child, target: &str) -> Result<()> {
println!("hippo url is {} and process is {:?}", url, process);
let mut wait_count = 0;
loop {
if wait_count >= 120 {
panic!(
"Ran out of retries waiting for {} to start on URL {}",
target, url
);
}
if let Ok(Some(_)) = process.try_wait() {
panic!(
"Process exited before starting to serve {} to start on URL {}",
target, url
);
}
if let Ok(rsp) = reqwest::get(format!("{url}/healthz")).await {
if rsp.status().is_success() {
break;
}
}
wait_count += 1;
sleep(Duration::from_secs(1)).await;
}
Ok(())
}
struct AutoDeleteFile {
pub path: PathBuf,
}
impl Drop for AutoDeleteFile {
fn drop(&mut self) {
std::fs::remove_file(&self.path).unwrap();
}
}
}
#[cfg(feature = "outbound-redis-tests")]
mod outbound_redis_tests {
use super::*;
@ -902,6 +607,41 @@ route = "/..."
Ok(())
}
// TODO: Test on Windows
#[cfg(not(target_os = "windows"))]
#[test]
fn test_cloud_plugin_install() -> Result<()> {
// Create a temporary directory for plugin source and manifests
let temp_dir = tempdir()?;
let dir = temp_dir.path();
let installed_plugins_dir = dir.join("tmp");
// Ensure that spin installs the plugins into the temporary directory
let mut env_map: HashMap<&str, &str> = HashMap::new();
env_map.insert(
"TEST_PLUGINS_DIRECTORY",
installed_plugins_dir.to_str().unwrap(),
);
// `spin login --help` should cause the `cloud` plugin to be installed
let args = vec![SPIN_BINARY, "login", "--help"];
// Run the command; the plugin-install message and login help text are captured on stdout
let output = run(args, None, Some(env_map.clone()))?;
// Ensure plugin is installed
assert!(std::str::from_utf8(&output.stdout)?
.trim()
.contains("The `cloud` plugin is required. Installing now."));
// Ensure login help info is displayed
assert!(std::str::from_utf8(&output.stdout)?
.trim()
.contains("Login to Fermyon Cloud"));
Ok(())
}
#[tokio::test]
async fn test_build_command() -> Result<()> {
do_test_build_command("tests/build/simple").await