🚀 testing24 is live
This is the EasyDeploy template app. Replace it with your own code — everything else is already wired up.
Your endpoints & services
Dev URL
testing24-dev.easy-deploy.135.181.177.246.nip.io
Prod URL
testing24.easy-deploy.135.181.177.246.nip.io
ArgoCD
Deployment status & history
Grafana dashboard
Metrics, logs, traces
Infisical — project: testing24
Environment variables & secrets
GitHub repo
https://github.com/easydeploytest/testing24
Deploy a new app
Create another app from the portal
Deploy your app
1
Add a
/healthz endpoint
Must return HTTP 200. That's the only platform requirement.
# Node.js
if (req.url === '/healthz') {
res.writeHead(200); res.end('{"status":"ok"}'); return;
}
# Python / FastAPI
@app.get("/healthz")
def health(): return {"status": "ok"}
# Go
mux.HandleFunc("/healthz", func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte(`{"status":"ok"}`))
})
2
Clone this repo and copy your code in
This repo is already wired up — CI, ArgoCD, Infisical,
app.yaml. Just bring your source files.
git clone https://github.com/easydeploytest/testing24
cd testing24
# copy your src/ and Dockerfile from your existing project
# do NOT copy app.yaml — it's already configured
Environment variables & secrets
Secrets are managed in Infisical, not in code or CI. They are injected into your pods as environment variables automatically — no redeploy needed when you change them (updated within ~5 minutes).
1
Open Infisical
Go to https://infisical.easy-deploy.135.181.177.246.nip.io → project testing24
2
Add secrets per environment
Use the dev environment for dev deployments and prod for production. Secrets in each environment are scoped —
prod secrets are never visible in dev pods.
3
Use them in your app
Read them as normal environment variables:
process.env.MY_SECRET / os.environ["MY_SECRET"] / os.Getenv("MY_SECRET")
Observability — OpenTelemetry setup
The OTel Operator is installed. For Node.js, Java, Python, and .NET the operator injects the agent into your pod automatically — no code changes needed. For Go and Bun auto-injection is not supported; add the SDK manually instead.
The following env vars are pre-set in every pod:
| Variable | Value | Description |
|---|---|---|
| OTEL_SERVICE_NAME | testing24 | Auto-set by Helm chart |
| OTEL_EXPORTER_OTLP_ENDPOINT | cluster-internal collector | Auto-set by Helm chart |
| OTEL_EXPORTER_OTLP_PROTOCOL | http/protobuf | Auto-set by Helm chart |
Auto-instrumented. The operator already injects the Node.js OTel agent — HTTP, DB, and outgoing requests are traced with zero code changes.
Structured logs (stdout → Loki)
// Minimal structured-logging helper: emits one JSON object per line to stdout,
// which the platform ships to Loki. OTEL_SERVICE_NAME is pre-set in every pod.
const log = (level, message, extra = {}) =>
console.log(JSON.stringify({ level, message, app: process.env.OTEL_SERVICE_NAME, ...extra }))
log('info', 'server started', { port: 3000 })
// NOTE(review): `err` stands for an Error caught in an enclosing try/catch —
// this line is illustrative and not runnable standalone.
log('error', 'something failed', { error: err.message })
Custom spans (optional)
const { trace, SpanStatusCode } = require('@opentelemetry/api');
// Tracer name conventionally matches the service name (OTEL_SERVICE_NAME).
const tracer = trace.getTracer('testing24');
// Example of wrapping an operation in a custom span. startActiveSpan makes
// the span "current" for the duration of the async callback, so any nested
// auto-instrumented calls (HTTP, DB) become children of it.
async function myOp() {
return tracer.startActiveSpan('my-op', async span => {
try {
// ... your code
} catch (err) {
// Record the failure on the span before rethrowing so traces show the error.
span.recordException(err);
span.setStatus({ code: SpanStatusCode.ERROR });
throw err;
} finally {
// Always end the span, success or failure — unended spans are never exported.
span.end();
}
});
}
Auto-instrumented. Set
otel.runtime: java in app.yaml — the operator injects the Java agent jar. No code changes needed.
In app.yaml:
name: testing24
port: 8080
otel:
  runtime: java
Structured logs (stdout → Loki)
// Structured logging: emit one JSON object per line to stdout → Loki.
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;
// Reuse a single ObjectMapper — construction is relatively expensive.
var mapper = new ObjectMapper();
// NOTE(review): Map.of rejects null values, so this throws if
// OTEL_SERVICE_NAME is unset; it is pre-set in every pod by the Helm chart.
System.out.println(mapper.writeValueAsString(Map.of(
"level", "info", "message", "server started",
"app", System.getenv("OTEL_SERVICE_NAME")
)));
Auto-instrumented. Set
otel.runtime: python in app.yaml — the operator wraps your process with opentelemetry-instrument. No code changes needed.
In app.yaml:
name: testing24
port: 3000
otel:
  runtime: python
Structured logs (stdout → Loki)
import json, os

def log(level, message, **extra):
    """Emit one JSON log record per line to stdout (shipped to Loki)."""
    record = {"level": level, "message": message,
              "app": os.environ.get("OTEL_SERVICE_NAME")}
    record.update(extra)
    print(json.dumps(record))

log("info", "server started", port=3000)
Manual SDK required. Bun is not compatible with the Node.js OTel operator injector. Add the SDK to your code and set
otel.runtime: none in app.yaml.
In app.yaml:
name: testing24
port: 3000
otel:
  runtime: none
1. Install packages
bun add @opentelemetry/sdk-node \
  @opentelemetry/exporter-trace-otlp-http \
  @opentelemetry/exporter-metrics-otlp-http \
  @opentelemetry/sdk-metrics
2. Create src/instrumentation.ts:
import { NodeSDK } from '@opentelemetry/sdk-node';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
import { OTLPMetricExporter } from '@opentelemetry/exporter-metrics-otlp-http';
import { PeriodicExportingMetricReader } from '@opentelemetry/sdk-metrics';
// Exporters read OTEL_EXPORTER_OTLP_ENDPOINT / _PROTOCOL from the environment
// (pre-set in every pod — see the env-var table), so no constructor options.
const sdk = new NodeSDK({
traceExporter: new OTLPTraceExporter(),
metricReader: new PeriodicExportingMetricReader({
exporter: new OTLPMetricExporter(),
// Push metrics every 15 seconds.
exportIntervalMillis: 15_000,
}),
});
sdk.start();
// Flush buffered telemetry on pod termination (Kubernetes sends SIGTERM first).
process.on('SIGTERM', () => sdk.shutdown());
3. Import first in src/index.ts:
import './instrumentation'; // must be the first import
// ... your app
Structured logs
// Structured-logging helper: one JSON object per line on stdout → Loki.
// OTEL_SERVICE_NAME is pre-set in every pod by the Helm chart.
const log = (level: string, message: string, extra = {}) =>
console.log(JSON.stringify({ level, message, app: process.env.OTEL_SERVICE_NAME, ...extra }));
log('info', 'server started', { port: 3000 });
Manual SDK required. Go eBPF auto-instrumentation is not enabled. Add the SDK to your code and set
otel.runtime: none in app.yaml.
In app.yaml:
name: testing24
port: 3000
otel:
  runtime: none
1. Add dependencies
go get go.opentelemetry.io/otel \
  go.opentelemetry.io/otel/sdk/trace \
  go.opentelemetry.io/otel/sdk/metric \
  go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp \
  go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp \
  go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp
2. Create telemetry.go:
package main
import (
"context"
"time"
"go.opentelemetry.io/otel"
otlptracehttp "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp"
otlpmetrichttp "go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp"
sdkmetric "go.opentelemetry.io/otel/sdk/metric"
sdktrace "go.opentelemetry.io/otel/sdk/trace"
)
func setupTelemetry(ctx context.Context) func() {
traceExp, _ := otlptracehttp.New(ctx)
tp := sdktrace.NewTracerProvider(sdktrace.WithBatcher(traceExp))
otel.SetTracerProvider(tp)
metricExp, _ := otlpmetrichttp.New(ctx)
mp := sdkmetric.NewMeterProvider(sdkmetric.WithReader(
sdkmetric.NewPeriodicReader(metricExp, sdkmetric.WithInterval(15*time.Second)),
))
otel.SetMeterProvider(mp)
return func() { tp.Shutdown(ctx); mp.Shutdown(ctx) }
}
3. Wrap HTTP handler
import "go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp"
func main() {
shutdown := setupTelemetry(context.Background())
defer shutdown()
mux := http.NewServeMux()
mux.HandleFunc("/healthz", func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte(`{"status":"ok"}`))
})
http.ListenAndServe(":3000", otelhttp.NewHandler(mux, "server"))
}
Deploy to production
Prod deploys are triggered by a version tag push, not pushes to main. This prevents accidental production deployments.
1
Verify dev first
All prod deploys re-tag the image already running on dev. Verify dev before promoting.
2
Push a version tag
Semver tag (e.g.
v1.0.0). The platform re-tags the dev image with this version and ArgoCD syncs the prod namespace.
git tag v1.0.0 && git push origin v1.0.0
3
Prod is live