Skip to content

Commit

Permalink
Simplify CDK ExtraArg processing with jcommander overrides
Browse files Browse the repository at this point in the history
Signed-off-by: Andre Kurait <[email protected]>
  • Loading branch information
AndreKurait committed Sep 26, 2024
1 parent 3ec8dcd commit 7b101b3
Show file tree
Hide file tree
Showing 21 changed files with 84 additions and 189 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,10 @@ public static class S3RepoInfo {

public static void main(String[] args) throws Exception {
Args arguments = new Args();
JCommander jCommander = JCommander.newBuilder().addObject(arguments).build();
JCommander jCommander = JCommander.newBuilder()
.allowParameterOverwriting(true)
.addObject(arguments)
.build();
jCommander.parse(args);

if (arguments.help) {
Expand Down
6 changes: 3 additions & 3 deletions DocumentsFromSnapshotMigration/docker/entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ echo "RFS_TARGET_PASSWORD_ARN: $RFS_TARGET_PASSWORD_ARN"
if [[ $RFS_COMMAND != *"--target-username"* ]]; then
if [[ -n "$RFS_TARGET_USER" ]]; then
echo "Using username from ENV variable RFS_TARGET_USER. Updating RFS Command with username."
RFS_COMMAND="$RFS_COMMAND --target-username $RFS_TARGET_USER"
RFS_COMMAND="$RFS_COMMAND --target-username \"$RFS_TARGET_USER\""
fi
fi

Expand All @@ -39,9 +39,9 @@ if [[ $RFS_COMMAND != *"--target-password"* ]]; then
# Append the username/password to the RFS Command if have an updated password
if [[ -n "$PASSWORD_TO_USE" ]]; then
echo "Updating RFS Command with password."
RFS_COMMAND="$RFS_COMMAND --target-password $PASSWORD_TO_USE"
RFS_COMMAND="$RFS_COMMAND --target-password \"$PASSWORD_TO_USE\""
fi
fi

echo "Executing RFS Command"
eval $RFS_COMMAND
eval $RFS_COMMAND
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ public static class Args {
"used to communicate to the target, default 10")
int maxConnections = 10;

@Parameter(names = { "--source-version" }, description = ("Optional. Version of the source cluster. Default: ES_7.10"), required = false,
@Parameter(names = { "--source-version" }, description = ("Optional. Version of the source cluster. Default: ES 7.10"), required = false,
converter = VersionConverter.class)
public Version sourceVersion = Version.fromString("ES 7.10");
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,9 @@ public class MetadataMigration {
public static void main(String[] args) throws Exception {
var metadataArgs = new MetadataArgs();
var migrateArgs = new MigrateArgs();
var evaluateArgs = new EvaluateArgs();
var evaluateArgs = new EvaluateArgs();
var jCommander = JCommander.newBuilder()
.allowParameterOverwriting(true)
.addObject(metadataArgs)
.addCommand(migrateArgs)
.addCommand(evaluateArgs)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ public class MigrateOrEvaluateArgs {
public ConnectionContext.TargetArgs targetArgs = new ConnectionContext.TargetArgs();

@ParametersDelegate
public DataFilterArgs dataFilterArgs = new DataFilterArgs();
public DataFilterArgs dataFilterArgs = new DataFilterArgs();

// https://opensearch.org/docs/2.11/api-reference/cluster-api/cluster-awareness/
@Parameter(names = {"--min-replicas" }, description = "Optional. The minimum number of replicas configured for migrated indices on the target."
Expand All @@ -50,6 +50,6 @@ public class MigrateOrEvaluateArgs {
+ "forwarded. If no value is provided, metrics will not be forwarded.")
String otelCollectorEndpoint;

@Parameter(names = {"--source-version" }, description = "Version of the source cluster, for example: Elasticsearch 7.10 or OS 1.3. Defaults to: ES_7.10", converter = VersionConverter.class)
@Parameter(names = {"--source-version" }, description = "Version of the source cluster, for example: Elasticsearch 7.10 or OS 1.3. Defaults to: ES 7.10", converter = VersionConverter.class)
public Version sourceVersion = Version.fromString("ES 7.10");
}
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
import org.apache.kafka.common.config.SaslConfigs;

import org.opensearch.common.settings.Settings;
import org.opensearch.migrations.jcommander.NoSplitter;
import org.opensearch.migrations.tracing.ActiveContextTracker;
import org.opensearch.migrations.tracing.ActiveContextTrackerByActivityType;
import org.opensearch.migrations.tracing.CompositeContextTracker;
Expand Down Expand Up @@ -159,12 +160,14 @@ public static class Parameters {
@Parameter(required = false,
names = "--setHeader",
arity = 2,
splitter = NoSplitter.class,
description = "[header-name header-value] Set an HTTP header (first argument) with to the specified value" +
" (second argument). Any existing headers with that name will be removed.")
public List<String> headerOverrides = new ArrayList<>();
@Parameter(required = false,
names = "--suppressCaptureForHeaderMatch",
arity = 2,
splitter = NoSplitter.class,
description = "The header name (which will be interpreted in a case-insensitive manner) and a regex "
+ "pattern. When the incoming request has a header that matches the regex, it will be passed "
+ "through to the service but will NOT be captured. E.g. user-agent 'healthcheck'.")
Expand All @@ -174,6 +177,7 @@ public static class Parameters {
static Parameters parseArgs(String[] args) {
Parameters p = new Parameters();
JCommander jCommander = new JCommander(p);
jCommander.setAllowParameterOverwriting(true);
try {
jCommander.parse(args);
// Exactly one these 3 options are required. See that exactly one is set by summing up their presence
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
public class TestHeaderRewrites {

public static final String ONLY_FOR_HEADERS_VALUE = "this is only for headers";
public static final String ONLY_FOR_HEADERS_VALUE_SPECIAL_CHARACTERS = "!@#$%^&*()_+,./:\":;\\/";
public static final String BODY_WITH_HEADERS_CONTENTS = "\n" +
"body: should stay\n" +
"body: untouched\n" +
Expand All @@ -44,6 +45,9 @@ public void testHeaderRewrites() throws Exception {
"host",
"localhost",
"--setHeader",
"specialCharacters",
ONLY_FOR_HEADERS_VALUE_SPECIAL_CHARACTERS,
"--setHeader",
"X-new-header",
"insignificant value"
);
Expand All @@ -70,7 +74,9 @@ public void testHeaderRewrites() throws Exception {
var capturedRequest = capturedRequestList.get(capturedRequestList.size()-1).getHeaders().stream()
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
Assertions.assertEquals("localhost", capturedRequest.get("host"));
Assertions.assertEquals(ONLY_FOR_HEADERS_VALUE_SPECIAL_CHARACTERS, capturedRequest.get("specialCharacters"));
Assertions.assertEquals("insignificant value", capturedRequest.get("X-new-header"));

}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ static class Parameters {
public static Parameters parseArgs(String[] args) {
Parameters p = new Parameters();
JCommander jCommander = new JCommander(p);
jCommander.setAllowParameterOverwriting(true);
try {
jCommander.parse(args);
return p;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.opensearch.migrations.jcommander.NoSplitter;
import org.opensearch.migrations.replay.tracing.RootReplayerContext;
import org.opensearch.migrations.replay.traffic.source.TrafficStreamLimiter;
import org.opensearch.migrations.replay.util.ActiveContextMonitor;
Expand Down Expand Up @@ -117,6 +118,7 @@ public static class Parameters {
@Parameter(
required = false, names = {
AWS_AUTH_HEADER_USER_AND_SECRET_ARG },
splitter = NoSplitter.class,
arity = 2,
description = "<USERNAME> <SECRET_ARN> pair to specify "
+ "\"authorization\" header value for each request. "
Expand Down Expand Up @@ -258,6 +260,7 @@ public static class Parameters {
private static Parameters parseArgs(String[] args) {
Parameters p = new Parameters();
JCommander jCommander = new JCommander(p);
jCommander.setAllowParameterOverwriting(true);
try {
jCommander.parse(args);
return p;
Expand Down
3 changes: 3 additions & 0 deletions coreUtilities/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,9 @@ dependencies {
implementation group: 'org.apache.logging.log4j', name: 'log4j-core'
implementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j2-impl'

// JCommander
compileOnly group: 'org.jcommander', name: 'jcommander'

// OpenTelemetry core
api group: 'io.opentelemetry', name: 'opentelemetry-api'
api group: 'io.opentelemetry', name: 'opentelemetry-sdk'
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
package org.opensearch.migrations.jcommander;

import java.util.List;

import com.beust.jcommander.converters.IParameterSplitter;

/**
 * A JCommander {@link IParameterSplitter} that performs no splitting: the raw
 * parameter value is returned unchanged as a single-element list.
 * <p>
 * Intended for use as {@code @Parameter(splitter = NoSplitter.class)} on
 * multi-arity parameters (e.g. {@code --setHeader name value}) so that values
 * containing JCommander's default split character (the comma) are passed
 * through verbatim instead of being broken into multiple tokens.
 */
public class NoSplitter implements IParameterSplitter {
    @Override
    public List<String> split(String value) {
        // List.of produces an immutable single-element list.
        // NOTE(review): List.of throws NullPointerException for a null value —
        // presumably JCommander never hands a null here; confirm if that changes.
        return List.of(value);
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -4,79 +4,8 @@ import {CpuArchitecture} from "aws-cdk-lib/aws-ecs";
import {RemovalPolicy, Stack} from "aws-cdk-lib";
import { IStringParameter, StringParameter } from "aws-cdk-lib/aws-ssm";
import * as forge from 'node-forge';
import * as yargs from 'yargs';
import { ClusterYaml } from "./migration-services-yaml";


// parseAndMergeArgs, see @common-utilities.test.ts for an example of different cases
/**
 * Merges user-supplied extra CLI arguments into a base command string.
 *
 * Both argument strings are parsed with yargs; entries from `extraArgs`
 * override same-named entries from `baseCommand`, and a negated boolean in
 * `extraArgs` (e.g. `--no-foo`, enabled via `boolean-negation: true` on the
 * extra-args parser only) can remove a flag present in the base command.
 * The command prefix (everything before the first `--`) is preserved as-is.
 *
 * @param baseCommand full command line, e.g. `run.sh --flag --key value`
 * @param extraArgs   optional space-separated overrides; returned command is
 *                    `baseCommand` unchanged when absent
 * @returns the reconstructed command string with merged arguments
 */
export function parseAndMergeArgs(baseCommand: string, extraArgs?: string): string {
    if (!extraArgs) {
        // Nothing to merge — return the base command untouched.
        return baseCommand;
    }

    // Extract command prefix: everything before the first `--` is the
    // executable/invocation part and is never re-parsed or re-ordered.
    const commandPrefix = baseCommand.substring(0, baseCommand.indexOf('--')).trim();
    const baseArgs = baseCommand.substring(baseCommand.indexOf('--'));

    // Parse base command.
    // `camel-case-expansion: false` keeps `--my-arg` keyed as "my-arg" (no
    // duplicate camelCase key); `boolean-negation: false` means a literal
    // `--no-x` in the base command stays as the key "no-x".
    const baseYargsConfig = {
        parserConfiguration: {
            'camel-case-expansion': false,
            'boolean-negation': false,
        }
    };

    const baseArgv = yargs(baseArgs)
        .parserConfiguration(baseYargsConfig.parserConfiguration)
        .parse();

    // Parse extra args if provided.
    // Here `boolean-negation: true`, so `--no-foo` parses as `foo: false`,
    // which the merge loop below treats as a request to drop `--foo`.
    const extraYargsConfig = {
        parserConfiguration: {
            'camel-case-expansion': false,
            'boolean-negation': true,
        }
    };

    const extraArgv = extraArgs
        ? yargs(extraArgs.split(' '))
            .parserConfiguration(extraYargsConfig.parserConfiguration)
            .parse()
        : {};

    // Merge arguments: extra args win; a `false` boolean from negation deletes
    // the corresponding base entry. `_` (positionals) and `$0` (script name)
    // are yargs bookkeeping keys and are skipped.
    const mergedArgv: { [key: string]: unknown } = { ...baseArgv };
    for (const [key, value] of Object.entries(extraArgv)) {
        if (key !== '_' && key !== '$0') {
            // Delete when the extra value is boolean `false` AND the base knew the
            // key either as a boolean flag or via a truthy non-boolean `no-<key>`
            // entry (the latter arises because base parsing keeps `--no-x` literal).
            // NOTE(review): the `no-${key}` truthiness clause is subtle — relies on
            // `typeof undefined` being falsy as a string; see common-utilities.test.ts.
            if (!value &&
                typeof value === 'boolean' &&
                (
                    typeof (baseArgv as any)[key] === 'boolean' ||
                    (typeof (baseArgv as any)[`no-${key}`] != 'boolean' && typeof (baseArgv as any)[`no-${key}`])
                )
            ) {
                delete mergedArgv[key];
            } else {
                mergedArgv[key] = value;
            }
        }
    }

    // Reconstruct command: booleans render as `--key` / `--no-key`,
    // everything else as `--key value`.
    const mergedArgs = Object.entries(mergedArgv)
        .filter(([key]) => key !== '_' && key !== '$0')
        .map(([key, value]) => {
            if (typeof value === 'boolean') {
                return value ? `--${key}` : `--no-${key}`;
            }
            return `--${key} ${value}`;
        })
        .join(' ');

    let fullCommand = `${commandPrefix} ${mergedArgs}`.trim()
    return fullCommand;
}

export function getTargetPasswordAccessPolicy(targetPasswordSecretArn: string): PolicyStatement {
return new PolicyStatement({
effect: Effect.ALLOW,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ import {
MigrationSSMParameter,
createMSKProducerIAMPolicies,
getMigrationStringParameterValue,
parseAndMergeArgs
} from "../common-utilities";
import {OtelCollectorSidecar} from "./migration-otel-collector-sidecar";

Expand Down Expand Up @@ -67,7 +66,7 @@ export class CaptureProxyESStack extends MigrationServiceCore {
command = props.streamingSourceType !== StreamingSourceType.DISABLED ? command.concat(` --kafkaConnection ${brokerEndpoints}`) : command
command = props.streamingSourceType === StreamingSourceType.AWS_MSK ? command.concat(" --enableMSKAuth") : command
command = props.otelCollectorEnabled ? command.concat(` --otelCollectorEndpoint ${OtelCollectorSidecar.getOtelLocalhostEndpoint()}`) : command
command = parseAndMergeArgs(command, props.extraArgs);
command = props.extraArgs ? command.concat(` ${props.extraArgs}`) : command

this.createService({
serviceName: "capture-proxy-es",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@ import {
createMSKProducerIAMPolicies,
getCustomStringParameterValue,
getMigrationStringParameterValue,
parseAndMergeArgs
} from "../common-utilities";
import {OtelCollectorSidecar} from "./migration-otel-collector-sidecar";

Expand Down Expand Up @@ -126,7 +125,7 @@ export class CaptureProxyStack extends MigrationServiceCore {
command = props.streamingSourceType !== StreamingSourceType.DISABLED ? command.concat(` --kafkaConnection ${brokerEndpoints}`) : command
command = props.streamingSourceType === StreamingSourceType.AWS_MSK ? command.concat(" --enableMSKAuth") : command
command = props.otelCollectorEnabled ? command.concat(` --otelCollectorEndpoint ${OtelCollectorSidecar.getOtelLocalhostEndpoint()}`) : command
command = parseAndMergeArgs(command, props.extraArgs);
command = props.extraArgs ? command.concat(` ${props.extraArgs}`) : command

this.createService({
serviceName: serviceName,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@ import {
createOpenSearchServerlessIAMAccessPolicy,
getTargetPasswordAccessPolicy,
getMigrationStringParameterValue,
parseAndMergeArgs,
ClusterAuth
} from "../common-utilities";
import { RFSBackfillYaml, SnapshotYaml } from "../migration-services-yaml";
Expand Down Expand Up @@ -69,11 +68,12 @@ export class ReindexFromSnapshotStack extends MigrationServiceCore {
parameter: MigrationSSMParameter.OS_CLUSTER_ENDPOINT,
});
const s3Uri = `s3://migration-artifacts-${this.account}-${props.stage}-${this.region}/rfs-snapshot-repo`;
let rfsCommand = `/rfs-app/runJavaWithClasspath.sh org.opensearch.migrations.RfsMigrateDocuments --s3-local-dir /tmp/s3_files --s3-repo-uri ${s3Uri} --s3-region ${this.region} --snapshot-name rfs-snapshot --lucene-dir '/lucene' --target-host ${osClusterEndpoint}`
let rfsCommand = `/rfs-app/runJavaWithClasspath.sh org.opensearch.migrations.RfsMigrateDocuments --s3-local-dir /tmp/s3_files --s3-repo-uri \"${s3Uri}\" --s3-region ${this.region} --snapshot-name rfs-snapshot --lucene-dir '/lucene' --target-host ${osClusterEndpoint}`
rfsCommand = props.clusterAuthDetails.sigv4 ? rfsCommand.concat(`--target-aws-service-signing-name ${props.clusterAuthDetails.sigv4.serviceSigningName} --target-aws-region ${props.clusterAuthDetails.sigv4.region}`) : rfsCommand
rfsCommand = props.otelCollectorEnabled ? rfsCommand.concat(` --otel-collector-endpoint ${OtelCollectorSidecar.getOtelLocalhostEndpoint()}`) : rfsCommand
rfsCommand = props.sourceClusterVersion ? rfsCommand.concat(` --source-version ${props.sourceClusterVersion}`) : rfsCommand
rfsCommand = parseAndMergeArgs(rfsCommand, props.extraArgs);
rfsCommand = props.sourceClusterVersion ? rfsCommand.concat(` --source-version \"${props.sourceClusterVersion}\"`) : rfsCommand
// TODO: This approach with extraArgs may not work with the entryPoint env arg processing that is occurring here. https://opensearch.atlassian.net/browse/MIGRATIONS-2025
rfsCommand = props.extraArgs ? rfsCommand.concat(` ${props.extraArgs}`) : rfsCommand

let targetUser = "";
let targetPassword = "";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import {
createMSKConsumerIAMPolicies,
createOpenSearchIAMAccessPolicy,
createOpenSearchServerlessIAMAccessPolicy,
getMigrationStringParameterValue, parseAndMergeArgs
getMigrationStringParameterValue
} from "../common-utilities";
import {StreamingSourceType} from "../streaming-source-type";
import { Duration } from "aws-cdk-lib";
Expand Down Expand Up @@ -80,15 +80,15 @@ export class TrafficReplayerStack extends MigrationServiceCore {
});
const groupId = props.customKafkaGroupId ? props.customKafkaGroupId : `logging-group-${deployId}`

let replayerCommand = `/runJavaWithClasspath.sh org.opensearch.migrations.replay.TrafficReplayer ${osClusterEndpoint} --insecure --kafka-traffic-brokers ${brokerEndpoints} --kafka-traffic-topic logging-traffic-topic --kafka-traffic-group-id ${groupId}`
let replayerCommand = `/runJavaWithClasspath.sh org.opensearch.migrations.replay.TrafficReplayer ${osClusterEndpoint} --insecure --kafka-traffic-brokers ${brokerEndpoints} --kafka-traffic-topic logging-traffic-topic --kafka-traffic-group-id \"${groupId}\"`
if (props.clusterAuthDetails.basicAuth) {
replayerCommand = replayerCommand.concat(` --auth-header-user-and-secret "${props.clusterAuthDetails.basicAuth.username} ${props.clusterAuthDetails.basicAuth.password_from_secret_arn}"`)
replayerCommand = replayerCommand.concat(` --auth-header-user-and-secret \"${props.clusterAuthDetails.basicAuth.username}\" \"${props.clusterAuthDetails.basicAuth.password_from_secret_arn}\""`)
}
replayerCommand = props.streamingSourceType === StreamingSourceType.AWS_MSK ? replayerCommand.concat(" --kafka-traffic-enable-msk-auth") : replayerCommand
replayerCommand = props.userAgentSuffix ? replayerCommand.concat(` --user-agent ${props.userAgentSuffix}`) : replayerCommand
replayerCommand = props.userAgentSuffix ? replayerCommand.concat(` --user-agent \"${props.userAgentSuffix}\"`) : replayerCommand
replayerCommand = props.clusterAuthDetails.sigv4 ? replayerCommand.concat(` --sigv4-auth-header-service-region ${props.clusterAuthDetails.sigv4.serviceSigningName},${props.clusterAuthDetails.sigv4.region}`) : replayerCommand
replayerCommand = props.otelCollectorEnabled ? replayerCommand.concat(` --otelCollectorEndpoint ${OtelCollectorSidecar.getOtelLocalhostEndpoint()}`) : replayerCommand
replayerCommand = parseAndMergeArgs(replayerCommand, props.extraArgs);
replayerCommand = props.extraArgs ? replayerCommand.concat(` ${props.extraArgs}`) : replayerCommand

this.createService({
serviceName: `traffic-replayer-${deployId}`,
Expand Down
6 changes: 2 additions & 4 deletions deployment/cdk/opensearch-service-migration/options.md
Original file line number Diff line number Diff line change
Expand Up @@ -142,8 +142,6 @@ A number of options are currently available but deprecated. While they function
<!-- Footnotes -->

[^1]: Extra arguments can be added, overridden, or removed as follows:
- To add a new argument: Include the argument with the value, e.g., `"--new-arg value"`
- To override an existing argument: Include the argument with the new value, e.g., `"--override-arg new-value"`
- To remove an argument: Use the negated form, e.g., `"--no-existing-arg"`
- To add/override an argument: Include the argument with the value, e.g., `"--new-arg value"`
- Include quotes/escaping as appropriate for bash processing `"--new-arg \"my value\""`

Example: `"--new-arg value --existing-arg new-value"`
Loading

0 comments on commit 7b101b3

Please sign in to comment.