Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: fix taskfile example not working when cluster setup is partially installed #978

Draft
wants to merge 18 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
196 changes: 165 additions & 31 deletions HelperTasks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,73 @@ vars:
node_list_internal: "{{range $idx, $n := .nodes }}node{{add $n 1}},{{end}}"
node_identifiers: "{{ .node_list_internal | trimSuffix \",\" }}"

solo_user_dir: "{{ env \"HOME\" }}/.solo"
solo_user_dir: "{{ .solo_home_override_dir | default (printf \"%s/.solo\" (env \"HOME\")) }}"
solo_cache_dir: "{{ .solo_user_dir }}/cache"
solo_logs_dir: "{{ .solo_user_dir }}/logs"
solo_keys_dir: "{{ .solo_cache_dir }}/keys"
solo_bin_dir: "{{ .solo_user_dir }}/bin"
run_build_file:
sh: (echo "/tmp/run-build-$(date +%Y%m%d%H%M%S)")
var_check_file:
sh: (echo "/tmp/var-check-$(date +%Y%m%d%H%M%S)")
minio_flag_file:
sh: (echo "/tmp/minio-flag-$(date +%Y%m%d%H%M%S)")

# TODO: test local build path
# TODO: make port forwards optional, doesn't work in Alex's multiple users on the same machine setup

# Environment variables exported to every task's shell.
# Callers may override these from their own environment.
env:
SOLO_CLUSTER_SETUP_NAMESPACE: solo-setup
SOLO_CLUSTER_RELEASE_NAME: solo-cluster-setup
SOLO_CLUSTER_NAME: solo-cluster
MIRROR_RELEASE_NAME: mirror

tasks:
# Common entry point (used directly and as a dep by other tasks):
# validates required variables, then builds the project once per invocation.
init:
cmds:
- task: "var:check"
- task: "run:build"

# Fail fast when any required variable is missing, then echo the effective
# configuration. Runs at most once per `task` invocation: var_check_file
# embeds a timestamp computed when vars are evaluated, so the status check
# dedupes repeated deps within one run but not across runs.
var:check:
silent: true
status:
- test -f {{ .var_check_file }}
# `requires` aborts with an error if any listed variable is undefined.
requires:
vars:
- solo_user_dir
- solo_cache_dir
- solo_logs_dir
- solo_keys_dir
- solo_bin_dir
- nodes
- node_list_internal
- node_identifiers
- run_build_file
- SOLO_CHART_VERSION
- CONSENSUS_NODE_VERSION
- SOLO_NAMESPACE
- SOLO_CLUSTER_SETUP_NAMESPACE
- SOLO_CLUSTER_RELEASE_NAME
- SOLO_NETWORK_SIZE
- SOLO_CLUSTER_NAME
- MIRROR_RELEASE_NAME
cmds:
- echo "Checking variables..."
- echo "solo_user_dir={{ .solo_user_dir }}"
# NOTE(review): other tasks pass SOLO_HOME_DIR to `npm run solo`, but this
# echoes SOLO_HOME — confirm which variable name is intended.
- echo "SOLO_HOME=${SOLO_HOME}"
- echo "SOLO_NETWORK_SIZE=${SOLO_NETWORK_SIZE}"
- echo "SOLO_CHART_VERSION=${SOLO_CHART_VERSION}"
- echo "CONSENSUS_NODE_VERSION=${CONSENSUS_NODE_VERSION}"
- echo "SOLO_NAMESPACE=${SOLO_NAMESPACE}"
- echo "nodes={{ .nodes }}"
- echo "node_identifiers={{ .node_identifiers }}"
# The *_FLAG variables are optional extra CLI flags; empty values are allowed
# (they are only echoed here, not validated by `requires`).
- echo "VALUES_FLAG=${VALUES_FLAG}"
- echo "SETTINGS_FLAG=${SETTINGS_FLAG}"
- echo "LOG4J2_FLAG=${LOG4J2_FLAG}"
- echo "APPLICATION_PROPERTIES_FLAG=${APPLICATION_PROPERTIES_FLAG}"
- echo "LOCAL_BUILD_FLAG=${LOCAL_BUILD_FLAG}"
# Marker file: its presence short-circuits subsequent runs via `status`.
- touch {{ .var_check_file }}

readme:
silent: true
cmds:
Expand All @@ -31,12 +91,9 @@ tasks:

install:solo:
internal: true
status:
- command -v solo
cmds:
- npm install -g @hashgraph/solo
- cd ../..
- npm link
- cd ..
- npm install

install:kubectl:darwin:
internal: true
Expand All @@ -61,6 +118,9 @@ tasks:

solo:init:
internal: true
silent: true
deps:
- task: "init"
status:
- test -f {{ .solo_bin_dir }}/helm
- test -f {{ .solo_cache_dir }}/profiles/custom-spec.yaml
Expand All @@ -72,8 +132,7 @@ tasks:
#- test "$(yq -r '.flags."node-ids"' < {{ .solo_user_dir }}/solo.yaml)" == "{{ .node_identifiers }}"
- test "$(jq -r '.flags."node-ids"' < {{ .solo_user_dir }}/solo.config)" == "{{ .node_identifiers }}"
cmds:
- npm run build
- solo init
- SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- init

solo:keys:
internal: true
Expand All @@ -85,57 +144,59 @@ tasks:
test -f {{ .solo_keys_dir }}/s-public-node${n}.pem
test -f {{ .solo_keys_dir }}/s-private-node${n}.pem
done
deps:
- task: "init"
cmds:
- npm run build
- solo node keys --gossip-keys --tls-keys --node-aliases-unparsed {{.node_identifiers}}
- SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- node keys --gossip-keys --tls-keys --node-aliases-unparsed {{.node_identifiers}} -q

solo:network:deploy:
internal: true
deps:
- task: "init"
cmds:
- npm run build
- solo network deploy --namespace "${SOLO_NAMESPACE}" --node-aliases-unparsed {{.node_identifiers}} --release-tag "${CONSENSUS_NODE_VERSION}" --solo-chart-version "${SOLO_CHART_VERSION}"
- solo node setup --namespace "${SOLO_NAMESPACE}" --node-aliases-unparsed {{.node_identifiers}} --release-tag "${CONSENSUS_NODE_VERSION}"
- SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- network deploy --namespace "${SOLO_NAMESPACE}" --node-aliases-unparsed {{.node_identifiers}} --release-tag "${CONSENSUS_NODE_VERSION}" --solo-chart-version "${SOLO_CHART_VERSION}" ${VALUES_FLAG} ${SETTINGS_FLAG} ${LOG4J2_FLAG} ${APPLICATION_PROPERTIES_FLAG} -q
- SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- node setup --namespace "${SOLO_NAMESPACE}" --node-aliases-unparsed {{.node_identifiers}} --release-tag "${CONSENSUS_NODE_VERSION}" ${LOCAL_BUILD_FLAG} -q

solo:network:destroy:
internal: true
deps:
- task: "init"
cmds:
- npm run build
- solo network destroy --namespace "${SOLO_NAMESPACE}" --delete-pvcs --delete-secrets --force
- SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- network destroy --namespace "${SOLO_NAMESPACE}" --delete-pvcs --delete-secrets --force -q

solo:node:start:
internal: true
deps:
- task: "init"
cmds:
- npm run build
- solo node start --namespace "${SOLO_NAMESPACE}" --node-aliases-unparsed {{.node_identifiers}} {{ .CLI_ARGS }}
- SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- node start --namespace "${SOLO_NAMESPACE}" --node-aliases-unparsed {{.node_identifiers}} -q {{ .CLI_ARGS }}
- kubectl port-forward -n "${SOLO_NAMESPACE}" svc/haproxy-node1-svc 50211:50211 &
- task: "sleep_after_port_forward"

solo:node:stop:
internal: true
ignore_error: true
deps:
- task: "init"
cmds:
- npm run build
- solo node stop --namespace "${SOLO_NAMESPACE}" --node-aliases-unparsed {{.node_identifiers}} {{ .CLI_ARGS }}

solo:node:addresses:
internal: true
cmds:
- kubectl get svc -n "${SOLO_NAMESPACE}" -l "solo.hedera.com/type=network-node-svc"
- SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- node stop --namespace "${SOLO_NAMESPACE}" --node-aliases-unparsed {{.node_identifiers}} -q {{ .CLI_ARGS }}

solo:relay:
deps:
- task: "init"
cmds:
- npm run build
- solo relay deploy -n "${SOLO_NAMESPACE}" -i node1
- SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- relay deploy -n "${SOLO_NAMESPACE}" -i node1 -q
- echo "Enable port forwarding for Hedera JSON RPC Relay"
- kubectl port-forward -n "${SOLO_NAMESPACE}" svc/relay-node1-hedera-json-rpc-relay 7546:7546 &
- task: "sleep_after_port_forward"

solo:destroy-relay:
status:
- helm list -n "${SOLO_NAMESPACE}" | grep -vqz relay-node1
deps:
- task: "init"
cmds:
- npm run build
- solo relay destroy -n "${SOLO_NAMESPACE}" -i node1
- SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- relay destroy -n "${SOLO_NAMESPACE}" -i node1 -q

solo:cache:remove:
internal: true
Expand Down Expand Up @@ -163,22 +224,95 @@ tasks:
- kind get clusters | grep -q "${SOLO_CLUSTER_NAME}"
cmds:
- kind create cluster -n "${SOLO_CLUSTER_NAME}" --image "${KIND_IMAGE}"
- sleep 10 # wait for control plane to come up

cluster:setup:
deps:
- task: "init"
cmds:
- npm run build
- solo cluster setup --cluster-setup-namespace "${SOLO_CLUSTER_SETUP_NAMESPACE}"
- SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- cluster setup --cluster-setup-namespace "${SOLO_CLUSTER_SETUP_NAMESPACE}" -q

# Delete the kind cluster created by cluster:create.
# All in-cluster state (network, mirror, relays) is lost.
cluster:destroy:
cmds:
- kind delete cluster --name "${SOLO_CLUSTER_NAME}"

clean:port-forward:
  # Best-effort cleanup of the background `kubectl port-forward` processes
  # spawned for this namespace (by solo:node:start / solo:relay).
  cmds:
    # NOTE: the previous form piped pkill's stdout into `grep ${UID}`, which
    # filters nothing — pkill prints no process list. To restrict the kill to
    # the current user's processes, use pkill's own -U filter instead.
    - pkill -U "$(id -u)" -f "kubectl port-forward -n {{ .SOLO_NAMESPACE }}" || true

sleep_after_port_forward:
cmds:
# Keep the task alive briefly after spawning a background `kubectl port-forward`;
# without this sleep, when port-forward is the last command of a series of
# tasks, the forward is prematurely killed as the task exits.
- sleep 4

# Build the project at most once per `task` invocation: run_build_file embeds
# a timestamp computed when vars are evaluated, so the marker dedupes repeated
# deps within a single run but forces a rebuild on the next run.
run:build:
silent: true
status:
- test -f {{ .run_build_file }}
cmds:
- npm run build
- touch {{ .run_build_file }}

solo:cluster:minio:
  internal: true
  silent: true
  # Detect whether a MinIO operator is already installed anywhere in the
  # cluster, and record the matching `solo cluster setup` flag in a temp file
  # that solo:cluster:setup reads.
  cmds:
    - |
      if ! kubectl get svc -l app.kubernetes.io/instance=minio-operator --all-namespaces --no-headers | grep -q . ; then
        # Message now matches the selector actually queried above (the old text
        # also mentioned app.kubernetes.io/name=operator, which is not checked).
        echo "No services found with label app.kubernetes.io/instance=minio-operator"
        # No operator present: let solo install MinIO.
        echo "--minio" > {{ .minio_flag_file }}
      else
        # Operator already present: tell solo to skip installing MinIO.
        echo "--no-minio" > {{ .minio_flag_file }}
      fi

# Install the solo cluster-setup chart, skipping work that is already done:
# the status check succeeds when the release is installed in any namespace,
# and the MinIO flag (written by the solo:cluster:minio dep) avoids
# re-installing an existing MinIO operator.
solo:cluster:setup:
silent: true
deps:
- task: "init"
- task: "solo:cluster:minio"
status:
- helm list --all-namespaces | grep -qz "${SOLO_CLUSTER_RELEASE_NAME}"
cmds:
- |
# MINIO_FLAG is either --minio or --no-minio, as detected by the dep task.
export MINIO_FLAG=$(cat {{ .minio_flag_file }})
SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- cluster setup --cluster-setup-namespace "${SOLO_CLUSTER_SETUP_NAMESPACE}" ${MINIO_FLAG} -q

# Print the network-node service addresses rendered through a Go template.
solo:node:addresses:
internal: true
cmds:
# assumes .ip_list_template_file is defined in the including Taskfile's vars —
# TODO(review): it is not declared in the vars visible here; confirm.
- kubectl get svc -n "${SOLO_NAMESPACE}" -l "solo.hedera.com/type=network-node-svc" --output=go-template-file={{ .ip_list_template_file }}

start:
  desc: solo node start
  # Public wrapper: run the shared init checks, then start all consensus nodes.
  deps:
    - task: init
  cmds:
    - task: solo:node:start

stop:
  desc: solo node stop
  # Public wrapper: run the shared init checks, then stop all consensus nodes.
  deps:
    - task: init
  cmds:
    - task: solo:node:stop

show:ips:
  # Public wrapper around solo:node:addresses (prints node service addresses).
  deps:
    - task: init
  cmds:
    - task: solo:node:addresses

clean:cache:
  desc: remove solo cache directory
  # Public wrapper around the internal solo:cache:remove task.
  deps:
    - task: init
  cmds:
    - task: solo:cache:remove

clean:logs:
  # Public wrapper around the internal solo:logs:remove task.
  # (Fixed typo in desc: "director" -> "directory".)
  desc: remove solo logs directory
  deps:
    - task: "init"
  cmds:
    - task: "solo:logs:remove"
Loading
Loading