Backend half
This commit is contained in:
+201
@@ -0,0 +1,201 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
+45
@@ -0,0 +1,45 @@
|
||||
# @smithy/core
|
||||
|
||||
[](https://www.npmjs.com/package/@smithy/core)
|
||||
[](https://www.npmjs.com/package/@smithy/core)
|
||||
|
||||
> An internal package. You probably shouldn't use this package, at least directly.
|
||||
|
||||
This package provides common or core functionality for generic Smithy clients.
|
||||
|
||||
You do not need to explicitly install this package, since it will be installed during code generation if used.
|
||||
|
||||
## Development of `@smithy/core` submodules
|
||||
|
||||
Core submodules are organized for distribution via the `package.json` `exports` field.
|
||||
|
||||
`exports` is supported by default by the latest Node.js, webpack, and esbuild. For react-native, it can be
|
||||
enabled via instructions found at [reactnative.dev/blog](https://reactnative.dev/blog/2023/06/21/package-exports-support), but we also provide a compatibility redirect.
|
||||
|
||||
Think of `@smithy/core` as a mono-package within the monorepo.
|
||||
It preserves the benefits of modularization, for example to optimize Node.js initialization speed,
|
||||
while making it easier to have a consistent version of core dependencies, reducing package sprawl when
|
||||
installing a Smithy runtime client.
|
||||
|
||||
### Guide for submodules
|
||||
|
||||
- Each `index.ts` file corresponding to the pattern `./src/submodules/<MODULE_NAME>/index.ts` will be
|
||||
published as a separate `dist-cjs` bundled submodule index using the `Inliner.js` build script.
|
||||
- create a folder as `./src/submodules/<SUBMODULE>` including an `index.ts` file and a `README.md` file.
|
||||
- The linter will throw an error on missing submodule metadata in `package.json` and the various `tsconfig.json` files, but it will automatically fix them if possible.
|
||||
- a submodule is equivalent to a standalone `@smithy/<pkg>` package in that importing it in Node.js will resolve a separate bundle.
|
||||
- submodules may not relatively import files from other submodules. Instead, directly use the `@scope/pkg/submodule` name as the import.
|
||||
- The linter will check for this and throw an error.
|
||||
- To the extent possible, correctly declaring submodule metadata is validated by the linter in `@smithy/core`.
|
||||
The linter runs during `yarn build` and also as `yarn lint`.
|
||||
|
||||
### When should I create an `@smithy/core/submodule` vs. `@smithy/new-package`?
|
||||
|
||||
Keep in mind that the core package is installed by all downstream clients.
|
||||
|
||||
If the component functionality is upstream of multiple clients, it is
|
||||
a good candidate for a core submodule. For example, if `middleware-retry` had been written
|
||||
after the support for submodules was added, it would have been a submodule.
|
||||
|
||||
If the component's functionality is downstream of a client (rare), or only expected to be used by a very small
|
||||
subset of clients, it could be written as a standalone package.
|
||||
+7
@@ -0,0 +1,7 @@
|
||||
/**
 * Do not edit:
 * This is a compatibility redirect for contexts that do not understand package.json exports field.
 */
// Maps the bare "@smithy/core/cbor" specifier onto the generated submodule
// type declarations for tooling that cannot resolve the `exports` field.
declare module "@smithy/core/cbor" {
  export * from "@smithy/core/dist-types/submodules/cbor/index.d";
}
|
||||
+6
@@ -0,0 +1,6 @@
|
||||
|
||||
/**
 * Do not edit:
 * This is a compatibility redirect for contexts that do not understand package.json exports field.
 */
// Forwards require() of this path to the bundled CommonJS cbor submodule entry.
module.exports = require("./dist-cjs/submodules/cbor/index.js");
|
||||
+1
@@ -0,0 +1 @@
|
||||
// Compatibility redirect: resolve a require() of this directory to its index file.
module.exports = require("./index.js");
|
||||
+454
@@ -0,0 +1,454 @@
|
||||
// esbuild-generated CommonJS interop helpers (bundler preamble).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Assigns a debug-friendly `name` to a function/class (non-writable, configurable).
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
// Defines lazy, enumerable getters on `target` for every key in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and keys already present; preserves enumerability where a descriptor exists.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Marks a fresh namespace object as an ES module and re-exports `mod` through it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
|
||||
// src/index.ts
// Export map for the bundle: each entry becomes a lazy getter on the
// CommonJS namespace via __export, so reading one symbol does not force
// evaluation of the others.
var src_exports = {};
__export(src_exports, {
  DefaultIdentityProviderConfig: () => DefaultIdentityProviderConfig,
  EXPIRATION_MS: () => EXPIRATION_MS,
  HttpApiKeyAuthSigner: () => HttpApiKeyAuthSigner,
  HttpBearerAuthSigner: () => HttpBearerAuthSigner,
  NoAuthSigner: () => NoAuthSigner,
  createIsIdentityExpiredFunction: () => createIsIdentityExpiredFunction,
  createPaginator: () => createPaginator,
  doesIdentityRequireRefresh: () => doesIdentityRequireRefresh,
  getHttpAuthSchemeEndpointRuleSetPlugin: () => getHttpAuthSchemeEndpointRuleSetPlugin,
  getHttpAuthSchemePlugin: () => getHttpAuthSchemePlugin,
  getHttpSigningPlugin: () => getHttpSigningPlugin,
  getSmithyContext: () => getSmithyContext,
  httpAuthSchemeEndpointRuleSetMiddlewareOptions: () => httpAuthSchemeEndpointRuleSetMiddlewareOptions,
  httpAuthSchemeMiddleware: () => httpAuthSchemeMiddleware,
  httpAuthSchemeMiddlewareOptions: () => httpAuthSchemeMiddlewareOptions,
  httpSigningMiddleware: () => httpSigningMiddleware,
  httpSigningMiddlewareOptions: () => httpSigningMiddlewareOptions,
  isIdentityExpired: () => isIdentityExpired,
  memoizeIdentityProvider: () => memoizeIdentityProvider,
  normalizeProvider: () => normalizeProvider,
  requestBuilder: () => import_protocols.requestBuilder,
  setFeature: () => setFeature
});
module.exports = __toCommonJS(src_exports);
|
||||
|
||||
// src/getSmithyContext.ts
var import_types = require("@smithy/types");
// Returns the per-call Smithy context bucket stored on `context`,
// creating an empty one on first access.
var getSmithyContext = function getSmithyContext(context) {
  return context[import_types.SMITHY_CONTEXT_KEY] || (context[import_types.SMITHY_CONTEXT_KEY] = {});
};
|
||||
|
||||
// src/middleware-http-auth-scheme/httpAuthSchemeMiddleware.ts
|
||||
var import_util_middleware = require("@smithy/util-middleware");
|
||||
|
||||
// src/middleware-http-auth-scheme/resolveAuthOptions.ts
// Reorders candidate auth options so schemes named in `authSchemePreference`
// come first (in preference order), followed by all remaining candidates in
// their original order. With no preference, candidates pass through untouched.
var resolveAuthOptions = function resolveAuthOptions(candidateAuthOptions, authSchemePreference) {
  if (!authSchemePreference?.length) {
    return candidateAuthOptions;
  }
  const orderedOptions = [];
  // First pass: pull candidates whose scheme short-name matches a preference.
  for (const preferredName of authSchemePreference) {
    for (const candidate of candidateAuthOptions) {
      // schemeId has the form "namespace#name"; compare on the name part.
      if (candidate.schemeId.split("#")[1] === preferredName) {
        orderedOptions.push(candidate);
      }
    }
  }
  // Second pass: append every candidate not already selected above.
  for (const candidate of candidateAuthOptions) {
    if (!orderedOptions.some(({ schemeId }) => schemeId === candidate.schemeId)) {
      orderedOptions.push(candidate);
    }
  }
  return orderedOptions;
};
|
||||
|
||||
// src/middleware-http-auth-scheme/httpAuthSchemeMiddleware.ts
// Indexes a list of HttpAuthSchemes by schemeId for O(1) lookup; later
// entries with a duplicate schemeId overwrite earlier ones, matching
// sequential Map.set semantics.
function convertHttpAuthSchemesToMap(httpAuthSchemes) {
  return new Map(httpAuthSchemes.map((scheme) => [scheme.schemeId, scheme]));
}
|
||||
// Middleware that selects the HTTP auth scheme for a request: it walks the
// (preference-ordered) candidate options and records the first one that has
// both a registered scheme and a usable identity provider on the Smithy
// context as `selectedHttpAuthScheme`. Throws if no candidate is usable.
var httpAuthSchemeMiddleware = /* @__PURE__ */ __name((config, mwOptions) => (next, context) => async (args) => {
  // Candidate auth options for this operation, from the service's provider.
  const options = config.httpAuthSchemeProvider(
    await mwOptions.httpAuthSchemeParametersProvider(config, context, args.input)
  );
  // Optional caller preference reorders candidates (see resolveAuthOptions).
  const authSchemePreference = config.authSchemePreference ? await config.authSchemePreference() : [];
  const resolvedOptions = resolveAuthOptions(options, authSchemePreference);
  const authSchemes = convertHttpAuthSchemesToMap(config.httpAuthSchemes);
  const smithyContext = (0, import_util_middleware.getSmithyContext)(context);
  // Reasons each skipped candidate was rejected; reported if none is usable.
  const failureReasons = [];
  for (const option of resolvedOptions) {
    const scheme = authSchemes.get(option.schemeId);
    if (!scheme) {
      failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` was not enabled for this service.`);
      continue;
    }
    const identityProvider = scheme.identityProvider(await mwOptions.identityProviderConfigProvider(config));
    if (!identityProvider) {
      failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` did not have an IdentityProvider configured.`);
      continue;
    }
    // Merge per-option properties extracted from config/context into the option.
    const { identityProperties = {}, signingProperties = {} } = option.propertiesExtractor?.(config, context) || {};
    option.identityProperties = Object.assign(option.identityProperties || {}, identityProperties);
    option.signingProperties = Object.assign(option.signingProperties || {}, signingProperties);
    // Record the winning scheme (option + resolved identity + signer) and stop.
    smithyContext.selectedHttpAuthScheme = {
      httpAuthOption: option,
      identity: await identityProvider(option.identityProperties),
      signer: scheme.signer
    };
    break;
  }
  if (!smithyContext.selectedHttpAuthScheme) {
    throw new Error(failureReasons.join("\n"));
  }
  return next(args);
}, "httpAuthSchemeMiddleware");
|
||||
|
||||
// src/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.ts
// Placement metadata: install auth-scheme selection in the serialize step,
// immediately before endpointV2Middleware.
var httpAuthSchemeEndpointRuleSetMiddlewareOptions = {
  step: "serialize",
  tags: ["HTTP_AUTH_SCHEME"],
  name: "httpAuthSchemeMiddleware",
  override: true,
  relation: "before",
  toMiddleware: "endpointV2Middleware"
};
// Plugin factory wiring httpAuthSchemeMiddleware into a client stack relative
// to the endpoint rule-set middleware.
var getHttpAuthSchemeEndpointRuleSetPlugin = function getHttpAuthSchemeEndpointRuleSetPlugin(config, middlewareParams) {
  const { httpAuthSchemeParametersProvider, identityProviderConfigProvider } = middlewareParams;
  return {
    applyToStack: (clientStack) => {
      clientStack.addRelativeTo(
        httpAuthSchemeMiddleware(config, {
          httpAuthSchemeParametersProvider,
          identityProviderConfigProvider
        }),
        httpAuthSchemeEndpointRuleSetMiddlewareOptions
      );
    }
  };
};
|
||||
|
||||
// src/middleware-http-auth-scheme/getHttpAuthSchemePlugin.ts
var import_middleware_serde = require("@smithy/middleware-serde");
// Placement metadata: install auth-scheme selection in the serialize step,
// immediately before the serializer middleware.
var httpAuthSchemeMiddlewareOptions = {
  step: "serialize",
  tags: ["HTTP_AUTH_SCHEME"],
  name: "httpAuthSchemeMiddleware",
  override: true,
  relation: "before",
  toMiddleware: import_middleware_serde.serializerMiddlewareOption.name
};
// Plugin factory wiring httpAuthSchemeMiddleware into a client stack relative
// to the serializer middleware.
var getHttpAuthSchemePlugin = function getHttpAuthSchemePlugin(config, middlewareParams) {
  const { httpAuthSchemeParametersProvider, identityProviderConfigProvider } = middlewareParams;
  return {
    applyToStack: (clientStack) => {
      clientStack.addRelativeTo(
        httpAuthSchemeMiddleware(config, {
          httpAuthSchemeParametersProvider,
          identityProviderConfigProvider
        }),
        httpAuthSchemeMiddlewareOptions
      );
    }
  };
};
|
||||
|
||||
// src/middleware-http-signing/httpSigningMiddleware.ts
var import_protocol_http = require("@smithy/protocol-http");

// Default error handler: re-throws the downstream error unchanged.
var defaultErrorHandler = /* @__PURE__ */ __name((signingProperties) => (error) => {
  throw error;
}, "defaultErrorHandler");
// Default success handler: a no-op.
var defaultSuccessHandler = /* @__PURE__ */ __name((httpResponse, signingProperties) => {
}, "defaultSuccessHandler");
// Middleware that signs the outgoing HttpRequest using the scheme selected
// earlier by httpAuthSchemeMiddleware (read from the Smithy context).
// Non-HttpRequest payloads pass through unsigned.
var httpSigningMiddleware = /* @__PURE__ */ __name((config) => (next, context) => async (args) => {
  if (!import_protocol_http.HttpRequest.isInstance(args.request)) {
    return next(args);
  }
  const smithyContext = (0, import_util_middleware.getSmithyContext)(context);
  const scheme = smithyContext.selectedHttpAuthScheme;
  if (!scheme) {
    throw new Error(`No HttpAuthScheme was selected: unable to sign request`);
  }
  const {
    httpAuthOption: { signingProperties = {} },
    identity,
    signer
  } = scheme;
  // Sign, forward, and route failure/success through the signer's own
  // handlers when provided, falling back to the defaults above.
  const output = await next({
    ...args,
    request: await signer.sign(args.request, identity, signingProperties)
  }).catch((signer.errorHandler || defaultErrorHandler)(signingProperties));
  (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties);
  return output;
}, "httpSigningMiddleware");
|
||||
|
||||
// src/middleware-http-signing/getHttpSigningMiddleware.ts
// Placement metadata: signing runs in the finalizeRequest step, after
// retryMiddleware; aliases cover the legacy middleware names it replaces.
var httpSigningMiddlewareOptions = {
  step: "finalizeRequest",
  tags: ["HTTP_SIGNING"],
  name: "httpSigningMiddleware",
  aliases: ["apiKeyMiddleware", "tokenMiddleware", "awsAuthMiddleware"],
  override: true,
  relation: "after",
  toMiddleware: "retryMiddleware"
};
// Plugin factory installing the request-signing middleware on a client stack.
var getHttpSigningPlugin = function getHttpSigningPlugin(config) {
  return {
    applyToStack: (clientStack) => {
      clientStack.addRelativeTo(httpSigningMiddleware(config), httpSigningMiddlewareOptions);
    }
  };
};
|
||||
|
||||
// src/normalizeProvider.ts
// Normalizes either a static value or a provider function into a provider
// function. Functions are returned as-is; static values are wrapped once so
// every invocation resolves the same promise.
var normalizeProvider = function normalizeProvider(input) {
  if (typeof input === "function") {
    return input;
  }
  const promisified = Promise.resolve(input);
  return () => promisified;
};
|
||||
|
||||
// src/pagination/createPaginator.ts
// Builds a Command from `input`, lets the optional `withCommand` hook replace
// or adjust it, and sends it through the client.
var makePagedClientRequest = /* @__PURE__ */ __name(async (CommandCtor, client, input, withCommand = (_) => _, ...args) => {
  let command = new CommandCtor(input);
  command = withCommand(command) ?? command;
  return await client.send(command, ...args);
}, "makePagedClientRequest");
// Creates an async-generator paginator for an operation. Each iteration
// writes the current token (and optional page size) into the input, sends the
// command, yields the page, then reads the next token out of the page via the
// dot-delimited `outputTokenName` path. Iteration stops when no token comes
// back, or — with config.stopOnSameToken — when the token repeats.
// NOTE(review): `_input` aliases the caller's `input`, so the paginator
// mutates the caller's input object in place; behavior preserved as-is.
function createPaginator(ClientCtor, CommandCtor, inputTokenName, outputTokenName, pageSizeTokenName) {
  return /* @__PURE__ */ __name(async function* paginateOperation(config, input, ...additionalArguments) {
    const _input = input;
    // config.startingToken overrides any token already present on the input.
    let token = config.startingToken ?? _input[inputTokenName];
    let hasNext = true;
    let page;
    while (hasNext) {
      _input[inputTokenName] = token;
      if (pageSizeTokenName) {
        _input[pageSizeTokenName] = _input[pageSizeTokenName] ?? config.pageSize;
      }
      // Only a client of the expected concrete type may be paginated.
      if (config.client instanceof ClientCtor) {
        page = await makePagedClientRequest(
          CommandCtor,
          config.client,
          input,
          config.withCommand,
          ...additionalArguments
        );
      } else {
        throw new Error(`Invalid client, expected instance of ${ClientCtor.name}`);
      }
      yield page;
      const prevToken = token;
      token = get(page, outputTokenName);
      hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken));
    }
    return void 0;
  }, "paginateOperation");
}
__name(createPaginator, "createPaginator");
|
||||
// Resolves a dot-delimited `path` (e.g. "a.b.c") against `fromObject`,
// returning undefined as soon as any intermediate value is missing or not
// an object.
var get = function get(fromObject, path) {
  let node = fromObject;
  for (const segment of path.split(".")) {
    if (!node || typeof node !== "object") {
      return undefined;
    }
    node = node[segment];
  }
  return node;
};
|
||||
|
||||
// src/protocols/requestBuilder.ts
|
||||
var import_protocols = require("@smithy/core/protocols");
|
||||
|
||||
// src/setFeature.ts
// Records a feature value on the context's `__smithy_context.features` bag,
// creating the intermediate objects on demand.
function setFeature(context, feature, value) {
  let smithyContext = context.__smithy_context;
  if (!smithyContext) {
    smithyContext = context.__smithy_context = { features: {} };
  } else if (!smithyContext.features) {
    smithyContext.features = {};
  }
  smithyContext.features[feature] = value;
}
|
||||
|
||||
// src/util-identity-and-auth/DefaultIdentityProviderConfig.ts
var DefaultIdentityProviderConfig = class DefaultIdentityProviderConfig {
  /**
   * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers.
   *
   * @param config scheme IDs and identity providers to configure
   */
  constructor(config) {
    this.authSchemes = new Map();
    for (const [schemeId, provider] of Object.entries(config)) {
      // Entries explicitly set to undefined are dropped rather than stored.
      if (provider !== undefined) {
        this.authSchemes.set(schemeId, provider);
      }
    }
  }
  /**
   * @param schemeId scheme ID to look up
   * @returns the identity provider registered for that scheme, if any
   */
  getIdentityProvider(schemeId) {
    return this.authSchemes.get(schemeId);
  }
};
|
||||
|
||||
// src/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.ts
var HttpApiKeyAuthSigner = class HttpApiKeyAuthSigner {
  /**
   * Signs a request with an API key, placing the key in either the query
   * string or a header depending on the signer properties.
   *
   * @param httpRequest request to sign; a clone is modified and returned.
   * @param identity holder of the apiKey credential.
   * @param signingProperties must carry `name` and `in`; `scheme` optionally
   *   prefixes the header value.
   */
  async sign(httpRequest, identity, signingProperties) {
    if (!signingProperties) {
      throw new Error(
        "request could not be signed with `apiKey` since the `name` and `in` signer properties are missing"
      );
    }
    if (!signingProperties.name) {
      throw new Error("request could not be signed with `apiKey` since the `name` signer property is missing");
    }
    if (!signingProperties.in) {
      throw new Error("request could not be signed with `apiKey` since the `in` signer property is missing");
    }
    if (!identity.apiKey) {
      throw new Error("request could not be signed with `apiKey` since the `apiKey` is not defined");
    }
    // Clone so the caller's request object is left untouched.
    const signedRequest = import_protocol_http.HttpRequest.clone(httpRequest);
    switch (signingProperties.in) {
      case import_types.HttpApiKeyAuthLocation.QUERY:
        signedRequest.query[signingProperties.name] = identity.apiKey;
        break;
      case import_types.HttpApiKeyAuthLocation.HEADER:
        signedRequest.headers[signingProperties.name] = signingProperties.scheme ? `${signingProperties.scheme} ${identity.apiKey}` : identity.apiKey;
        break;
      default:
        throw new Error(
          "request can only be signed with `apiKey` locations `query` or `header`, but found: `" + signingProperties.in + "`"
        );
    }
    return signedRequest;
  }
};
|
||||
|
||||
// src/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.ts
var HttpBearerAuthSigner = class HttpBearerAuthSigner {
  /**
   * Adds an `Authorization: Bearer <token>` header to a clone of the request.
   *
   * @throws Error when the identity carries no token.
   */
  async sign(httpRequest, identity, signingProperties) {
    const signedRequest = import_protocol_http.HttpRequest.clone(httpRequest);
    if (!identity.token) {
      throw new Error("request could not be signed with `token` since the `token` is not defined");
    }
    signedRequest.headers["Authorization"] = `Bearer ${identity.token}`;
    return signedRequest;
  }
};
|
||||
|
||||
// src/util-identity-and-auth/httpAuthSchemes/noAuth.ts
// Signer for the `noAuth` scheme: a pass-through that never alters the request.
var NoAuthSigner = class NoAuthSigner {
  async sign(httpRequest, identity, signingProperties) {
    return httpRequest;
  }
};
|
||||
|
||||
// src/util-identity-and-auth/memoizeIdentityProvider.ts
// Builds a predicate that reports whether an identity's expiration falls
// within `expirationMs` of now (only for identities that expire at all).
var createIsIdentityExpiredFunction = function createIsIdentityExpiredFunction(expirationMs) {
  return (identity) => doesIdentityRequireRefresh(identity) && identity.expiration.getTime() - Date.now() < expirationMs;
};
// Default refresh window: 5 minutes, in milliseconds.
var EXPIRATION_MS = 3e5;
var isIdentityExpired = createIsIdentityExpiredFunction(EXPIRATION_MS);
// An identity with no expiration set never needs refreshing.
var doesIdentityRequireRefresh = function doesIdentityRequireRefresh(identity) {
  return identity.expiration !== void 0;
};
|
||||
// Memoizes an identity provider:
// - a plain identity value is wrapped into an async provider;
// - concurrent refreshes are coalesced into a single in-flight promise;
// - identities without an expiration become "constant" and are never refetched;
// - expiring identities are refreshed when `isExpired` reports true or
//   when the caller passes `forceRefresh`.
// Returns undefined when no provider was given.
var memoizeIdentityProvider = /* @__PURE__ */ __name((provider, isExpired, requiresRefresh) => {
  if (provider === void 0) {
    return void 0;
  }
  // Accept either an identity object or an identity-provider function.
  const normalizedProvider = typeof provider !== "function" ? async () => Promise.resolve(provider) : provider;
  let resolved;
  let pending;
  let hasResult;
  let isConstant = false;
  // Single-flight fetch: all concurrent callers await the same `pending`
  // promise; `resolved`/`hasResult` are updated in the shared closure.
  const coalesceProvider = /* @__PURE__ */ __name(async (options) => {
    if (!pending) {
      pending = normalizedProvider(options);
    }
    try {
      resolved = await pending;
      hasResult = true;
      isConstant = false;
    } finally {
      // Always clear the in-flight slot, even on rejection, so a later
      // call can retry.
      pending = void 0;
    }
    return resolved;
  }, "coalesceProvider");
  if (isExpired === void 0) {
    // No expiration check supplied: fetch once, then serve the cached value
    // (unless forceRefresh is requested).
    return async (options) => {
      if (!hasResult || options?.forceRefresh) {
        resolved = await coalesceProvider(options);
      }
      return resolved;
    };
  }
  return async (options) => {
    if (!hasResult || options?.forceRefresh) {
      resolved = await coalesceProvider(options);
    }
    // Once marked constant (no expiration), skip all further checks.
    if (isConstant) {
      return resolved;
    }
    if (!requiresRefresh(resolved)) {
      isConstant = true;
      return resolved;
    }
    if (isExpired(resolved)) {
      // coalesceProvider reassigns the closured `resolved` before returning,
      // so the value returned below is the freshly fetched identity.
      await coalesceProvider(options);
      return resolved;
    }
    return resolved;
  };
}, "memoizeIdentityProvider");
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
|
||||
0 && (module.exports = {
|
||||
createPaginator,
|
||||
getSmithyContext,
|
||||
httpAuthSchemeMiddleware,
|
||||
httpAuthSchemeEndpointRuleSetMiddlewareOptions,
|
||||
getHttpAuthSchemeEndpointRuleSetPlugin,
|
||||
httpAuthSchemeMiddlewareOptions,
|
||||
getHttpAuthSchemePlugin,
|
||||
httpSigningMiddleware,
|
||||
httpSigningMiddlewareOptions,
|
||||
getHttpSigningPlugin,
|
||||
normalizeProvider,
|
||||
requestBuilder,
|
||||
setFeature,
|
||||
DefaultIdentityProviderConfig,
|
||||
HttpApiKeyAuthSigner,
|
||||
HttpBearerAuthSigner,
|
||||
NoAuthSigner,
|
||||
createIsIdentityExpiredFunction,
|
||||
EXPIRATION_MS,
|
||||
isIdentityExpired,
|
||||
doesIdentityRequireRefresh,
|
||||
memoizeIdentityProvider
|
||||
});
|
||||
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../index.js");
|
||||
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("./index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../index.js");
|
||||
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("./index.js");
|
||||
+990
@@ -0,0 +1,990 @@
|
||||
// esbuild CommonJS interop boilerplate (standard helpers emitted by the bundler).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines each entry of `all` as a live, enumerable getter on `target`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and keys already present; enumerability mirrors the source descriptor.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Marks the namespace object as an ES module, then copies its exports.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
|
||||
// src/submodules/cbor/index.ts
|
||||
var cbor_exports = {};
|
||||
__export(cbor_exports, {
|
||||
CborCodec: () => CborCodec,
|
||||
CborShapeDeserializer: () => CborShapeDeserializer,
|
||||
CborShapeSerializer: () => CborShapeSerializer,
|
||||
SmithyRpcV2CborProtocol: () => SmithyRpcV2CborProtocol,
|
||||
buildHttpRpcRequest: () => buildHttpRpcRequest,
|
||||
cbor: () => cbor,
|
||||
checkCborResponse: () => checkCborResponse,
|
||||
dateToTag: () => dateToTag,
|
||||
loadSmithyRpcV2CborErrorCode: () => loadSmithyRpcV2CborErrorCode,
|
||||
parseCborBody: () => parseCborBody,
|
||||
parseCborErrorBody: () => parseCborErrorBody,
|
||||
tag: () => tag,
|
||||
tagSymbol: () => tagSymbol
|
||||
});
|
||||
module.exports = __toCommonJS(cbor_exports);
|
||||
|
||||
// src/submodules/cbor/cbor-decode.ts
|
||||
var import_serde = require("@smithy/core/serde");
|
||||
var import_util_utf8 = require("@smithy/util-utf8");
|
||||
|
||||
// src/submodules/cbor/cbor-types.ts
|
||||
var majorUint64 = 0;
|
||||
var majorNegativeInt64 = 1;
|
||||
var majorUnstructuredByteString = 2;
|
||||
var majorUtf8String = 3;
|
||||
var majorList = 4;
|
||||
var majorMap = 5;
|
||||
var majorTag = 6;
|
||||
var majorSpecial = 7;
|
||||
var specialFalse = 20;
|
||||
var specialTrue = 21;
|
||||
var specialNull = 22;
|
||||
var specialUndefined = 23;
|
||||
var extendedOneByte = 24;
|
||||
var extendedFloat16 = 25;
|
||||
var extendedFloat32 = 26;
|
||||
var extendedFloat64 = 27;
|
||||
var minorIndefinite = 31;
|
||||
/**
 * Allocates a zero-filled byte buffer of `size` bytes.
 * Prefers Node's Buffer (native, zero-filled by `Buffer.alloc`);
 * falls back to a plain Uint8Array outside Node.
 */
function alloc(size) {
  if (typeof Buffer !== "undefined") {
    return Buffer.alloc(size);
  }
  return new Uint8Array(size);
}
|
||||
// Marker symbol distinguishing CBOR tag wrappers from ordinary objects.
var tagSymbol = Symbol("@smithy/core/cbor::tagSymbol");
/**
 * Brands an object (expected shape: `{ tag, value }`) as a CBOR tag
 * by setting the marker symbol, and returns the same object.
 */
function tag(taggable) {
  taggable[tagSymbol] = true;
  return taggable;
}
|
||||
|
||||
// src/submodules/cbor/cbor-decode.ts
|
||||
var USE_TEXT_DECODER = typeof TextDecoder !== "undefined";
|
||||
var USE_BUFFER = typeof Buffer !== "undefined";
|
||||
var payload = alloc(0);
|
||||
var dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength);
|
||||
var textDecoder = USE_TEXT_DECODER ? new TextDecoder() : null;
|
||||
var _offset = 0;
|
||||
// Installs `bytes` as the module-level decode buffer and rebuilds the
// DataView over it. Must be called before decode(); the decoder functions
// all read from this shared state.
function setPayload(bytes) {
  payload = bytes;
  dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength);
}
|
||||
// Decodes one CBOR data item from the shared `payload` buffer, starting at
// index `at` and bounded by `to` (exclusive). Returns the decoded JS value
// and records the number of bytes consumed in the module-level `_offset`
// (callers advance their cursor by `_offset` after each call).
function decode(at, to) {
  if (at >= to) {
    throw new Error("unexpected end of (decode) payload.");
  }
  // Initial byte: top 3 bits = major type, low 5 bits = minor / argument.
  const major = (payload[at] & 224) >> 5;
  const minor = payload[at] & 31;
  switch (major) {
    case majorUint64:
    case majorNegativeInt64:
    case majorTag:
      // Read the integer argument: either the minor itself (< 24) or a
      // 1/2/4/8-byte big-endian value following the initial byte.
      let unsignedInt;
      let offset;
      if (minor < 24) {
        unsignedInt = minor;
        offset = 1;
      } else {
        switch (minor) {
          case extendedOneByte:
          case extendedFloat16:
          case extendedFloat32:
          case extendedFloat64:
            // minors 24-27 double as 1/2/4/8-byte uint length specifiers.
            const countLength = minorValueToArgumentLength[minor];
            const countOffset = countLength + 1;
            offset = countOffset;
            if (to - at < countOffset) {
              throw new Error(`countLength ${countLength} greater than remaining buf len.`);
            }
            const countIndex = at + 1;
            if (countLength === 1) {
              unsignedInt = payload[countIndex];
            } else if (countLength === 2) {
              unsignedInt = dataView.getUint16(countIndex);
            } else if (countLength === 4) {
              unsignedInt = dataView.getUint32(countIndex);
            } else {
              unsignedInt = dataView.getBigUint64(countIndex);
            }
            break;
          default:
            throw new Error(`unexpected minor value ${minor}.`);
        }
      }
      if (major === majorUint64) {
        _offset = offset;
        // Demote to number when within safe-integer range.
        return castBigInt(unsignedInt);
      } else if (major === majorNegativeInt64) {
        // CBOR negative integers encode (-1 - n).
        let negativeInt;
        if (typeof unsignedInt === "bigint") {
          negativeInt = BigInt(-1) - unsignedInt;
        } else {
          negativeInt = -1 - unsignedInt;
        }
        _offset = offset;
        return castBigInt(negativeInt);
      } else {
        // major === majorTag; `minor` (the tag number when < 24) selects handling.
        if (minor === 2 || minor === 3) {
          // Tags 2/3: bignum encoded as a big-endian byte string.
          const length = decodeCount(at + offset, to);
          let b = BigInt(0);
          const start = at + offset + _offset;
          for (let i = start; i < start + length; ++i) {
            b = b << BigInt(8) | BigInt(payload[i]);
          }
          // NOTE(review): _offset here excludes the byte string's own
          // count-header length (included in `start` above) — verify against
          // upstream for payloads whose bignum length header is multi-byte.
          _offset = offset + length;
          return minor === 3 ? -b - BigInt(1) : b;
        } else if (minor === 4) {
          // Tag 4: decimal fraction [exponent, mantissa] -> NumericValue.
          const decimalFraction = decode(at + offset, to);
          const [exponent, mantissa] = decimalFraction;
          const s = mantissa.toString();
          const numericString = exponent === 0 ? s : s.slice(0, s.length + exponent) + "." + s.slice(exponent);
          // NOTE(review): returns with _offset set by the inner decode only,
          // not including the tag head byte(s) — confirm callers account for this.
          return new import_serde.NumericValue(numericString, "bigDecimal");
        } else {
          // Any other tag: wrap the decoded content in a { tag, value } marker.
          const value = decode(at + offset, to);
          const valueOffset = _offset;
          _offset = offset + valueOffset;
          return tag({ tag: castBigInt(unsignedInt), value });
        }
      }
    case majorUtf8String:
    case majorMap:
    case majorList:
    case majorUnstructuredByteString:
      // Definite vs. indefinite-length forms dispatch to dedicated decoders.
      if (minor === minorIndefinite) {
        switch (major) {
          case majorUtf8String:
            return decodeUtf8StringIndefinite(at, to);
          case majorMap:
            return decodeMapIndefinite(at, to);
          case majorList:
            return decodeListIndefinite(at, to);
          case majorUnstructuredByteString:
            return decodeUnstructuredByteStringIndefinite(at, to);
        }
      } else {
        switch (major) {
          case majorUtf8String:
            return decodeUtf8String(at, to);
          case majorMap:
            return decodeMap(at, to);
          case majorList:
            return decodeList(at, to);
          case majorUnstructuredByteString:
            return decodeUnstructuredByteString(at, to);
        }
      }
    default:
      // major 7: simple values and floats.
      return decodeSpecial(at, to);
  }
}
|
||||
// Decodes payload bytes [at, to) to a UTF-8 string, preferring the fastest
// available mechanism: native Buffer, then a shared TextDecoder, then the
// @smithy/util-utf8 fallback.
function bytesToUtf8(bytes, at, to) {
  if (USE_BUFFER && bytes.constructor?.name === "Buffer") {
    return bytes.toString("utf-8", at, to);
  }
  if (textDecoder) {
    return textDecoder.decode(bytes.subarray(at, to));
  }
  return (0, import_util_utf8.toUtf8)(bytes.subarray(at, to));
}
|
||||
/**
 * Converts a bigint to a number, warning (without throwing) when the
 * conversion falls outside the safe-integer range and may lose precision.
 */
function demote(bigInteger) {
  const approximation = Number(bigInteger);
  const unsafe =
    approximation < Number.MIN_SAFE_INTEGER || Number.MAX_SAFE_INTEGER < approximation;
  if (unsafe) {
    console.warn(new Error(`@smithy/core/cbor - truncating BigInt(${bigInteger}) to ${approximation} with loss of precision.`));
  }
  return approximation;
}
|
||||
var minorValueToArgumentLength = {
|
||||
[extendedOneByte]: 1,
|
||||
[extendedFloat16]: 2,
|
||||
[extendedFloat32]: 4,
|
||||
[extendedFloat64]: 8
|
||||
};
|
||||
/**
 * Decodes an IEEE 754 half-precision (binary16) float from its two
 * big-endian bytes: 1 sign bit, 5 exponent bits, 10 fraction bits.
 */
function bytesToFloat16(a, b) {
  const sign = (a & 128) !== 0 ? -1 : 1;
  const exponent = (a >> 2) & 31;
  const fraction = ((a & 3) << 8) | b;
  if (exponent === 31) {
    // All-ones exponent: infinity when fraction is zero, otherwise NaN.
    return fraction === 0 ? sign * Infinity : NaN;
  }
  if (exponent === 0) {
    // Zero or subnormal: no implicit leading 1, fixed 2^(1-15) scale.
    return fraction === 0 ? 0 : sign * Math.pow(2, -14) * (fraction / 1024);
  }
  // Normal value: implicit leading 1 plus 10-bit fraction.
  return sign * Math.pow(2, exponent - 15) * (1 + fraction / 1024);
}
|
||||
// Reads the length/count argument of the data item at `at` (string, list,
// map lengths). Sets `_offset` to the size of the count header (initial
// byte plus any extended bytes). 64-bit counts are demoted to number,
// warning on precision loss.
function decodeCount(at, to) {
  const minor = payload[at] & 31;
  if (minor < 24) {
    // Count embedded directly in the initial byte.
    _offset = 1;
    return minor;
  }
  if (minor === extendedOneByte || minor === extendedFloat16 || minor === extendedFloat32 || minor === extendedFloat64) {
    // minors 24-27 select a 1/2/4/8-byte big-endian count.
    const countLength = minorValueToArgumentLength[minor];
    _offset = countLength + 1;
    if (to - at < _offset) {
      throw new Error(`countLength ${countLength} greater than remaining buf len.`);
    }
    const countIndex = at + 1;
    if (countLength === 1) {
      return payload[countIndex];
    } else if (countLength === 2) {
      return dataView.getUint16(countIndex);
    } else if (countLength === 4) {
      return dataView.getUint32(countIndex);
    }
    return demote(dataView.getBigUint64(countIndex));
  }
  throw new Error(`unexpected minor value ${minor}.`);
}
|
||||
// Decodes a definite-length UTF-8 text string at `at`.
// Sets `_offset` to count-header size + string byte length.
function decodeUtf8String(at, to) {
  const length = decodeCount(at, to);
  const offset = _offset;
  at += offset;
  if (to - at < length) {
    throw new Error(`string len ${length} greater than remaining buf len.`);
  }
  const value = bytesToUtf8(payload, at, at + length);
  _offset = offset + length;
  return value;
}
|
||||
// Decodes an indefinite-length UTF-8 string: a sequence of definite-length
// text-string chunks terminated by the break marker (0xff). Chunk bytes are
// accumulated and decoded as one string. Sets `_offset` to the total
// consumed bytes including the head and break marker.
function decodeUtf8StringIndefinite(at, to) {
  at += 1;
  const vector = [];
  for (const base = at; at < to; ) {
    if (payload[at] === 255) {
      // Break marker: join accumulated bytes and decode once.
      const data2 = alloc(vector.length);
      data2.set(vector, 0);
      _offset = at - base + 2;
      return bytesToUtf8(data2, 0, data2.length);
    }
    const major = (payload[at] & 224) >> 5;
    const minor = payload[at] & 31;
    if (major !== majorUtf8String) {
      throw new Error(`unexpected major type ${major} in indefinite string.`);
    }
    if (minor === minorIndefinite) {
      // Nesting indefinite chunks is forbidden by RFC 8949.
      throw new Error("nested indefinite string.");
    }
    // Read the chunk's raw bytes (decoded to text only at the end).
    const bytes = decodeUnstructuredByteString(at, to);
    const length = _offset;
    at += length;
    for (let i = 0; i < bytes.length; ++i) {
      vector.push(bytes[i]);
    }
  }
  throw new Error("expected break marker.");
}
|
||||
// Decodes a definite-length byte string at `at`, returning a subarray view
// into the shared payload (no copy). Sets `_offset` to count-header size +
// byte length.
function decodeUnstructuredByteString(at, to) {
  const length = decodeCount(at, to);
  const offset = _offset;
  at += offset;
  if (to - at < length) {
    throw new Error(`unstructured byte string len ${length} greater than remaining buf len.`);
  }
  const value = payload.subarray(at, at + length);
  _offset = offset + length;
  return value;
}
|
||||
// Decodes an indefinite-length byte string: definite-length byte-string
// chunks terminated by the break marker (0xff), concatenated into a fresh
// buffer. Sets `_offset` to total consumed bytes including head and break.
function decodeUnstructuredByteStringIndefinite(at, to) {
  at += 1;
  const vector = [];
  for (const base = at; at < to; ) {
    if (payload[at] === 255) {
      // Break marker: copy accumulated bytes into a contiguous buffer.
      const data2 = alloc(vector.length);
      data2.set(vector, 0);
      _offset = at - base + 2;
      return data2;
    }
    const major = (payload[at] & 224) >> 5;
    const minor = payload[at] & 31;
    if (major !== majorUnstructuredByteString) {
      throw new Error(`unexpected major type ${major} in indefinite string.`);
    }
    if (minor === minorIndefinite) {
      throw new Error("nested indefinite string.");
    }
    const bytes = decodeUnstructuredByteString(at, to);
    const length = _offset;
    at += length;
    for (let i = 0; i < bytes.length; ++i) {
      vector.push(bytes[i]);
    }
  }
  throw new Error("expected break marker.");
}
|
||||
// Decodes a definite-length array: reads the element count, then decodes
// that many items sequentially, advancing by each item's `_offset`.
// Sets `_offset` to the total bytes consumed.
function decodeList(at, to) {
  const listDataLength = decodeCount(at, to);
  const offset = _offset;
  at += offset;
  const base = at;
  const list = Array(listDataLength);
  for (let i = 0; i < listDataLength; ++i) {
    const item = decode(at, to);
    const itemOffset = _offset;
    list[i] = item;
    at += itemOffset;
  }
  _offset = offset + (at - base);
  return list;
}
|
||||
// Decodes an indefinite-length array: items until the break marker (0xff).
// Sets `_offset` to the total consumed bytes including head and break.
function decodeListIndefinite(at, to) {
  at += 1;
  const list = [];
  for (const base = at; at < to; ) {
    if (payload[at] === 255) {
      _offset = at - base + 2;
      return list;
    }
    const item = decode(at, to);
    const n = _offset;
    at += n;
    list.push(item);
  }
  throw new Error("expected break marker.");
}
|
||||
// Decodes a definite-length map into a plain object. Keys are required to
// be text strings (rejects non-string keys rather than coercing).
// Sets `_offset` to the total bytes consumed.
function decodeMap(at, to) {
  const mapDataLength = decodeCount(at, to);
  const offset = _offset;
  at += offset;
  const base = at;
  const map = {};
  for (let i = 0; i < mapDataLength; ++i) {
    if (at >= to) {
      throw new Error("unexpected end of map payload.");
    }
    const major = (payload[at] & 224) >> 5;
    if (major !== majorUtf8String) {
      throw new Error(`unexpected major type ${major} for map key at index ${at}.`);
    }
    const key = decode(at, to);
    at += _offset;
    const value = decode(at, to);
    at += _offset;
    map[key] = value;
  }
  _offset = offset + (at - base);
  return map;
}
|
||||
// Decodes an indefinite-length map (key/value pairs until the break marker).
// Keys must be text strings. Sets `_offset` to total consumed bytes
// including head and break marker.
function decodeMapIndefinite(at, to) {
  at += 1;
  const base = at;
  const map = {};
  for (; at < to; ) {
    // Redundant with the loop condition, kept for parity with decodeMap.
    if (at >= to) {
      throw new Error("unexpected end of map payload.");
    }
    if (payload[at] === 255) {
      _offset = at - base + 2;
      return map;
    }
    const major = (payload[at] & 224) >> 5;
    if (major !== majorUtf8String) {
      throw new Error(`unexpected major type ${major} for map key.`);
    }
    const key = decode(at, to);
    at += _offset;
    const value = decode(at, to);
    at += _offset;
    map[key] = value;
  }
  throw new Error("expected break marker.");
}
|
||||
// Decodes a major-type-7 item: simple values (true/false/null/undefined)
// and half/single/double-precision floats. Note: CBOR `undefined` is
// deliberately mapped to null here. Sets `_offset` to bytes consumed.
function decodeSpecial(at, to) {
  const minor = payload[at] & 31;
  switch (minor) {
    case specialTrue:
    case specialFalse:
      _offset = 1;
      return minor === specialTrue;
    case specialNull:
      _offset = 1;
      return null;
    case specialUndefined:
      // undefined is normalized to null for consumers.
      _offset = 1;
      return null;
    case extendedFloat16:
      if (to - at < 3) {
        throw new Error("incomplete float16 at end of buf.");
      }
      _offset = 3;
      return bytesToFloat16(payload[at + 1], payload[at + 2]);
    case extendedFloat32:
      if (to - at < 5) {
        throw new Error("incomplete float32 at end of buf.");
      }
      _offset = 5;
      return dataView.getFloat32(at + 1);
    case extendedFloat64:
      if (to - at < 9) {
        throw new Error("incomplete float64 at end of buf.");
      }
      _offset = 9;
      return dataView.getFloat64(at + 1);
    default:
      throw new Error(`unexpected minor value ${minor}.`);
  }
}
|
||||
/**
 * Demotes a bigint to a plain number when it fits in the safe-integer
 * range; numbers pass through unchanged, out-of-range bigints are
 * returned as-is (no precision loss).
 */
function castBigInt(bigInt) {
  if (typeof bigInt === "number") {
    return bigInt;
  }
  const asNumber = Number(bigInt);
  const safe = Number.MIN_SAFE_INTEGER <= asNumber && asNumber <= Number.MAX_SAFE_INTEGER;
  return safe ? asNumber : bigInt;
}
|
||||
|
||||
// src/submodules/cbor/cbor-encode.ts
|
||||
var import_serde2 = require("@smithy/core/serde");
|
||||
var import_util_utf82 = require("@smithy/util-utf8");
|
||||
var USE_BUFFER2 = typeof Buffer !== "undefined";
|
||||
var initialSize = 2048;
|
||||
var data = alloc(initialSize);
|
||||
var dataView2 = new DataView(data.buffer, data.byteOffset, data.byteLength);
|
||||
var cursor = 0;
|
||||
// Grows the shared encode buffer so at least `bytes` more bytes fit after
// `cursor`. Grows geometrically (4x) while small, then by fixed 16 MB
// increments to bound over-allocation on large payloads.
function ensureSpace(bytes) {
  const remaining = data.byteLength - cursor;
  if (remaining < bytes) {
    if (cursor < 16e6) {
      resize(Math.max(data.byteLength * 4, data.byteLength + bytes));
    } else {
      resize(data.byteLength + bytes + 16e6);
    }
  }
}
|
||||
// Copies the encoded bytes [0, cursor) out of the shared buffer and resets
// the cursor, leaving the encoder ready for the next serialization.
function toUint8Array() {
  const out = alloc(cursor);
  out.set(data.subarray(0, cursor), 0);
  cursor = 0;
  return out;
}
|
||||
// Reallocates the shared encode buffer to `size` bytes, preserving existing
// contents, and rebuilds the DataView. Buffer.copy is used when available
// (Node); otherwise Uint8Array.set.
function resize(size) {
  const old = data;
  data = alloc(size);
  if (old) {
    if (old.copy) {
      old.copy(data, 0, 0, old.byteLength);
    } else {
      data.set(old, 0);
    }
  }
  dataView2 = new DataView(data.buffer, data.byteOffset, data.byteLength);
}
|
||||
// Writes a CBOR item head for `major` with argument `value` at `cursor`,
// choosing the smallest encoding: embedded (< 24), or 1/2/4/8-byte
// extended argument. The extendedFloat16/32/64 constants (25/26/27) double
// as the 2/4/8-byte uint argument specifiers per RFC 8949.
// Caller must ensureSpace() first.
function encodeHeader(major, value) {
  if (value < 24) {
    data[cursor++] = major << 5 | value;
  } else if (value < 1 << 8) {
    data[cursor++] = major << 5 | 24;
    data[cursor++] = value;
  } else if (value < 1 << 16) {
    data[cursor++] = major << 5 | extendedFloat16;
    dataView2.setUint16(cursor, value);
    cursor += 2;
  } else if (value < 2 ** 32) {
    data[cursor++] = major << 5 | extendedFloat32;
    dataView2.setUint32(cursor, value);
    cursor += 4;
  } else {
    data[cursor++] = major << 5 | extendedFloat64;
    dataView2.setBigUint64(cursor, typeof value === "bigint" ? value : BigInt(value));
    cursor += 8;
  }
}
|
||||
// Encodes `_input` as CBOR into the shared module-level buffer (`data`,
// `cursor`). Iterative: an explicit stack replaces recursion, so children
// are pushed in reverse to be emitted in order after their container head.
// Callers retrieve bytes via toUint8Array().
function encode(_input) {
  const encodeStack = [_input];
  while (encodeStack.length) {
    const input = encodeStack.pop();
    // Worst-case UTF-8 expansion is 4 bytes/char; 64 covers any scalar head.
    ensureSpace(typeof input === "string" ? input.length * 4 : 64);
    if (typeof input === "string") {
      if (USE_BUFFER2) {
        // Node fast path: Buffer.write encodes UTF-8 directly into `data`.
        encodeHeader(majorUtf8String, Buffer.byteLength(input));
        cursor += data.write(input, cursor);
      } else {
        const bytes = (0, import_util_utf82.fromUtf8)(input);
        encodeHeader(majorUtf8String, bytes.byteLength);
        data.set(bytes, cursor);
        cursor += bytes.byteLength;
      }
      continue;
    } else if (typeof input === "number") {
      if (Number.isInteger(input)) {
        // Negative integers encode as (-1 - n) under major 1.
        const nonNegative = input >= 0;
        const major = nonNegative ? majorUint64 : majorNegativeInt64;
        const value = nonNegative ? input : -input - 1;
        if (value < 24) {
          data[cursor++] = major << 5 | value;
        } else if (value < 256) {
          data[cursor++] = major << 5 | 24;
          data[cursor++] = value;
        } else if (value < 65536) {
          data[cursor++] = major << 5 | extendedFloat16;
          data[cursor++] = value >> 8;
          data[cursor++] = value;
        } else if (value < 4294967296) {
          data[cursor++] = major << 5 | extendedFloat32;
          dataView2.setUint32(cursor, value);
          cursor += 4;
        } else {
          data[cursor++] = major << 5 | extendedFloat64;
          dataView2.setBigUint64(cursor, BigInt(value));
          cursor += 8;
        }
        continue;
      }
      // Non-integer numbers are always emitted as float64.
      data[cursor++] = majorSpecial << 5 | extendedFloat64;
      dataView2.setFloat64(cursor, input);
      cursor += 8;
      continue;
    } else if (typeof input === "bigint") {
      const nonNegative = input >= 0;
      const major = nonNegative ? majorUint64 : majorNegativeInt64;
      const value = nonNegative ? input : -input - BigInt(1);
      const n = Number(value);
      if (n < 24) {
        data[cursor++] = major << 5 | n;
      } else if (n < 256) {
        data[cursor++] = major << 5 | 24;
        data[cursor++] = n;
      } else if (n < 65536) {
        data[cursor++] = major << 5 | extendedFloat16;
        data[cursor++] = n >> 8;
        data[cursor++] = n & 255;
      } else if (n < 4294967296) {
        data[cursor++] = major << 5 | extendedFloat32;
        dataView2.setUint32(cursor, n);
        cursor += 4;
      } else if (value < BigInt("18446744073709551616")) {
        data[cursor++] = major << 5 | extendedFloat64;
        dataView2.setBigUint64(cursor, value);
        cursor += 8;
      } else {
        // Beyond 64 bits: encode as a tag-2/3 bignum (big-endian byte string).
        const binaryBigInt = value.toString(2);
        const bigIntBytes = new Uint8Array(Math.ceil(binaryBigInt.length / 8));
        let b = value;
        let i = 0;
        while (bigIntBytes.byteLength - ++i >= 0) {
          bigIntBytes[bigIntBytes.byteLength - i] = Number(b & BigInt(255));
          b >>= BigInt(8);
        }
        ensureSpace(bigIntBytes.byteLength * 2);
        // 0xc2 = tag(2) positive bignum, 0xc3 = tag(3) negative bignum.
        data[cursor++] = nonNegative ? 194 : 195;
        if (USE_BUFFER2) {
          encodeHeader(majorUnstructuredByteString, Buffer.byteLength(bigIntBytes));
        } else {
          encodeHeader(majorUnstructuredByteString, bigIntBytes.byteLength);
        }
        data.set(bigIntBytes, cursor);
        cursor += bigIntBytes.byteLength;
      }
      continue;
    } else if (input === null) {
      data[cursor++] = majorSpecial << 5 | specialNull;
      continue;
    } else if (typeof input === "boolean") {
      data[cursor++] = majorSpecial << 5 | (input ? specialTrue : specialFalse);
      continue;
    } else if (typeof input === "undefined") {
      // undefined is rejected rather than silently dropped or nulled.
      throw new Error("@smithy/core/cbor: client may not serialize undefined value.");
    } else if (Array.isArray(input)) {
      // Push children in reverse so they pop (and encode) in order.
      for (let i = input.length - 1; i >= 0; --i) {
        encodeStack.push(input[i]);
      }
      encodeHeader(majorList, input.length);
      continue;
    } else if (typeof input.byteLength === "number") {
      // Typed arrays / Buffers encode as byte strings.
      ensureSpace(input.length * 2);
      encodeHeader(majorUnstructuredByteString, input.length);
      data.set(input, cursor);
      cursor += input.byteLength;
      continue;
    } else if (typeof input === "object") {
      if (input instanceof import_serde2.NumericValue) {
        // Tag 4 decimal fraction: 0xc4 head + [exponent, mantissa] pair.
        const decimalIndex = input.string.indexOf(".");
        const exponent = decimalIndex === -1 ? 0 : decimalIndex - input.string.length + 1;
        const mantissa = BigInt(input.string.replace(".", ""));
        data[cursor++] = 196;
        encodeStack.push(mantissa);
        encodeStack.push(exponent);
        encodeHeader(majorList, 2);
        continue;
      }
      if (input[tagSymbol]) {
        if ("tag" in input && "value" in input) {
          encodeStack.push(input.value);
          encodeHeader(majorTag, input.tag);
          continue;
        } else {
          throw new Error(
            "tag encountered with missing fields, need 'tag' and 'value', found: " + JSON.stringify(input)
          );
        }
      }
      // Plain object -> map; push value then key (reversed) per entry.
      const keys = Object.keys(input);
      for (let i = keys.length - 1; i >= 0; --i) {
        const key = keys[i];
        encodeStack.push(input[key]);
        encodeStack.push(key);
      }
      encodeHeader(majorMap, keys.length);
      continue;
    }
    throw new Error(`data type ${input?.constructor?.name ?? typeof input} not compatible for encoding.`);
  }
}
|
||||
|
||||
// src/submodules/cbor/cbor.ts
|
||||
// Public facade over the module-level CBOR encoder/decoder state.
var cbor = {
  // Decodes a full CBOR payload (Uint8Array) to a JS value.
  deserialize(payload2) {
    setPayload(payload2);
    return decode(0, payload2.length);
  },
  // Encodes a JS value to CBOR bytes. On failure the shared buffer cursor
  // is still reset (via toUint8Array) so a failed call cannot corrupt the
  // next serialization.
  serialize(input) {
    try {
      encode(input);
      return toUint8Array();
    } catch (e) {
      toUint8Array();
      throw e;
    }
  },
  /**
   * @public
   * @param size - byte length to allocate.
   *
   * This may be used to garbage collect the CBOR
   * shared encoding buffer space,
   * e.g. resizeEncodingBuffer(0);
   *
   * This may also be used to pre-allocate more space for
   * CBOR encoding, e.g. resizeEncodingBuffer(100_000_000);
   */
  resizeEncodingBuffer(size) {
    resize(size);
  }
};
|
||||
|
||||
// src/submodules/cbor/parseCborBody.ts
|
||||
var import_protocols = require("@smithy/core/protocols");
|
||||
var import_protocol_http = require("@smithy/protocol-http");
|
||||
var import_util_body_length_browser = require("@smithy/util-body-length-browser");
|
||||
// Collects a response stream and CBOR-decodes it. An empty body yields {}.
// On decode failure, the raw body text is attached to the error as
// `$responseBodyText` to aid diagnostics upstream.
var parseCborBody = (streamBody, context) => {
  return (0, import_protocols.collectBody)(streamBody, context).then(async (bytes) => {
    if (bytes.length) {
      try {
        return cbor.deserialize(bytes);
      } catch (e) {
        Object.defineProperty(e, "$responseBodyText", {
          value: context.utf8Encoder(bytes)
        });
        throw e;
      }
    }
    return {};
  });
};
|
||||
/**
 * Converts a Date to a CBOR tag-1 epoch timestamp wrapper.
 * Milliseconds are divided down to (possibly fractional) seconds.
 */
var dateToTag = (date) => {
  const epochSeconds = date.getTime() / 1e3;
  return tag({
    tag: 1,
    value: epochSeconds
  });
};
|
||||
/**
 * Parses a CBOR error body and normalizes the message field casing
 * (some services emit `Message` rather than `message`).
 */
var parseCborErrorBody = async (errorBody, context) => {
  const parsed = await parseCborBody(errorBody, context);
  parsed.message = parsed.message ?? parsed.Message;
  return parsed;
};
|
||||
/**
 * Extracts the error code from a deserialized RPCv2 CBOR error payload.
 * Prefers the `__type` field, falling back to any key spelled "code"
 * (case-insensitive). The raw value is sanitized: numbers stringified,
 * anything after a "," or ":" dropped, and a shape-ID namespace prefix
 * ("ns#Name") stripped. Returns undefined when no code is present.
 */
var loadSmithyRpcV2CborErrorCode = (output, data2) => {
  const sanitizeErrorCode = (rawValue) => {
    let code = typeof rawValue === "number" ? rawValue.toString() : rawValue;
    for (const separator of [",", ":"]) {
      if (code.includes(separator)) {
        code = code.split(separator)[0];
      }
    }
    if (code.includes("#")) {
      code = code.split("#")[1];
    }
    return code;
  };
  if (data2["__type"] !== void 0) {
    return sanitizeErrorCode(data2["__type"]);
  }
  const codeKey = Object.keys(data2).find((key) => key.toLowerCase() === "code");
  if (codeKey && data2[codeKey] !== void 0) {
    return sanitizeErrorCode(data2[codeKey]);
  }
};
|
||||
/**
 * Validates that a response advertises the RPCv2 CBOR protocol via the
 * `smithy-protocol` header; throws otherwise.
 */
var checkCborResponse = (response) => {
  const protocolHeader = String(response.headers["smithy-protocol"]).toLowerCase();
  if (protocolHeader !== "rpc-v2-cbor") {
    throw new Error("Malformed RPCv2 CBOR response, status: " + response.statusCode);
  }
};
|
||||
// Builds an HttpRequest for an RPC-style POST: resolves the endpoint,
// joins the base path with the operation path (avoiding a double slash),
// optionally overrides the hostname, and attaches the body with a
// best-effort content-length header.
var buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => {
  const { hostname, protocol = "https", port, path: basePath } = await context.endpoint();
  const contents = {
    protocol,
    hostname,
    port,
    method: "POST",
    path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path,
    headers: {
      // intentional copy.
      ...headers
    }
  };
  if (resolvedHostname !== void 0) {
    contents.hostname = resolvedHostname;
  }
  if (body !== void 0) {
    contents.body = body;
    try {
      contents.headers["content-length"] = String((0, import_util_body_length_browser.calculateBodyLength)(body));
    } catch (e) {
      // Best-effort: unknown body length is tolerated (e.g. streams).
    }
  }
  return new import_protocol_http.HttpRequest(contents);
};
|
||||
|
||||
// src/submodules/cbor/SmithyRpcV2CborProtocol.ts
|
||||
var import_protocols2 = require("@smithy/core/protocols");
|
||||
var import_schema2 = require("@smithy/core/schema");
|
||||
var import_util_middleware = require("@smithy/util-middleware");
|
||||
|
||||
// src/submodules/cbor/CborCodec.ts
|
||||
var import_schema = require("@smithy/core/schema");
|
||||
var import_serde3 = require("@smithy/core/serde");
|
||||
var CborCodec = class {
  /**
   * Stores the serde context, which is propagated to every serializer and
   * deserializer this codec creates.
   */
  setSerdeContext(serdeContext) {
    this.serdeContext = serdeContext;
  }
  /** Creates a CBOR shape serializer bound to this codec's serde context. */
  createSerializer() {
    const shapeSerializer = new CborShapeSerializer();
    shapeSerializer.setSerdeContext(this.serdeContext);
    return shapeSerializer;
  }
  /** Creates a CBOR shape deserializer bound to this codec's serde context. */
  createDeserializer() {
    const shapeDeserializer = new CborShapeDeserializer();
    shapeDeserializer.setSerdeContext(this.serdeContext);
    return shapeDeserializer;
  }
};
|
||||
// Serializes a modeled shape to CBOR. write() walks the value with its
// schema, normalizing it (Dates -> tag-1 timestamps, null filtering per the
// `sparse` trait, dropping unmodeled struct members); flush() emits the
// CBOR bytes and clears the staged value.
var CborShapeSerializer = class {
  setSerdeContext(serdeContext) {
    this.serdeContext = serdeContext;
  }
  write(schema, value) {
    this.value = (0, import_serde3.copyDocumentWithTransform)(value, schema, (_, schemaRef) => {
      if (_ instanceof Date) {
        // Dates become CBOR tag-1 epoch-seconds timestamps.
        return dateToTag(_);
      }
      if (_ instanceof Uint8Array) {
        return _;
      }
      const ns = import_schema.NormalizedSchema.of(schemaRef);
      const sparse = !!ns.getMergedTraits().sparse;
      if (ns.isListSchema() && Array.isArray(_)) {
        // Non-sparse lists drop null/undefined entries.
        if (!sparse) {
          return _.filter((item) => item != null);
        }
      } else if (_ && typeof _ === "object") {
        const members = ns.getMemberSchemas();
        const isStruct = ns.isStructSchema();
        if (!sparse || isStruct) {
          for (const [k, v] of Object.entries(_)) {
            // Drop nullish values (non-sparse) and members absent from the
            // structure's schema.
            const filteredOutByNonSparse = !sparse && v == null;
            const filteredOutByUnrecognizedMember = isStruct && !(k in members);
            if (filteredOutByNonSparse || filteredOutByUnrecognizedMember) {
              delete _[k];
            }
          }
          return _;
        }
      }
      return _;
    });
  }
  // Emits the staged value as CBOR bytes and resets the staged state.
  flush() {
    const buffer = cbor.serialize(this.value);
    this.value = void 0;
    return buffer;
  }
};
|
||||
// Decodes CBOR bytes, then recursively coerces the decoded document to the
// shapes described by the schema (timestamps, lists, maps, structures).
var CborShapeDeserializer = class {
  setSerdeContext(serdeContext) {
    this.serdeContext = serdeContext;
  }
  // Entry point: decode raw CBOR bytes and walk the result against `schema`.
  read(schema, bytes) {
    const data2 = cbor.deserialize(bytes);
    return this.readValue(schema, data2);
  }
  // Recursive schema-directed coercion of one decoded value.
  readValue(_schema, value) {
    const ns = import_schema.NormalizedSchema.of(_schema);
    const schema = ns.getSchema();
    // Numeric schema codes are the "simple type" sentinels; only timestamps
    // and blobs need special handling here.
    if (typeof schema === "number") {
      if (ns.isTimestampSchema()) {
        // CBOR timestamps arrive as epoch seconds.
        return (0, import_serde3.parseEpochTimestamp)(value);
      }
      if (ns.isBlobSchema()) {
        return value;
      }
    }
    if (typeof value === "undefined" || typeof value === "boolean" || typeof value === "number" || typeof value === "string" || typeof value === "bigint" || typeof value === "symbol") {
      // Primitives are already in their final form.
      return value;
    } else if (typeof value === "function" || typeof value === "object") {
      if (value === null) {
        return null;
      }
      // Byte buffers (anything exposing byteLength) pass through untouched.
      if ("byteLength" in value) {
        return value;
      }
      if (value instanceof Date) {
        return value;
      }
      // Documents are schemaless; return the raw decoded structure.
      if (ns.isDocumentSchema()) {
        return value;
      }
      if (ns.isListSchema()) {
        const newArray = [];
        const memberSchema = ns.getValueSchema();
        const sparse = ns.isListSchema() && !!ns.getMergedTraits().sparse;
        for (const item of value) {
          newArray.push(this.readValue(memberSchema, item));
          // Non-sparse lists silently drop null entries (pop what we just pushed).
          if (!sparse && newArray[newArray.length - 1] == null) {
            newArray.pop();
          }
        }
        return newArray;
      }
      const newObject = {};
      if (ns.isMapSchema()) {
        const sparse = ns.getMergedTraits().sparse;
        const targetSchema = ns.getValueSchema();
        for (const key of Object.keys(value)) {
          newObject[key] = this.readValue(targetSchema, value[key]);
          // Non-sparse maps drop entries that deserialized to null/undefined.
          if (newObject[key] == null && !sparse) {
            delete newObject[key];
          }
        }
      } else if (ns.isStructSchema()) {
        // Only schema-declared members are read; extraneous keys are ignored.
        for (const [key, memberSchema] of ns.structIterator()) {
          newObject[key] = this.readValue(memberSchema, value[key]);
        }
      }
      return newObject;
    } else {
      return value;
    }
  }
};
|
||||
|
||||
// src/submodules/cbor/SmithyRpcV2CborProtocol.ts
|
||||
// Smithy RPCv2-CBOR protocol: RPC-style request/response using CBOR payloads
// and the fixed /service/{service}/operation/{operation} path scheme.
var SmithyRpcV2CborProtocol = class extends import_protocols2.RpcProtocol {
  constructor({ defaultNamespace }) {
    super({ defaultNamespace });
    this.codec = new CborCodec();
    this.serializer = this.codec.createSerializer();
    this.deserializer = this.codec.createDeserializer();
  }
  getShapeId() {
    return "smithy.protocols#rpcv2Cbor";
  }
  getPayloadCodec() {
    return this.codec;
  }
  // Builds the base RPC request, then applies rpcv2Cbor-specific headers,
  // the empty-body / unit-input rules, and the protocol's operation path.
  async serializeRequest(operationSchema, input, context) {
    const request = await super.serializeRequest(operationSchema, input, context);
    Object.assign(request.headers, {
      "content-type": "application/cbor",
      "smithy-protocol": "rpc-v2-cbor",
      accept: "application/cbor"
    });
    if ((0, import_schema2.deref)(operationSchema.input) === "unit") {
      // Unit input: no body and no content-type may be sent.
      delete request.body;
      delete request.headers["content-type"];
    } else {
      if (!request.body) {
        // Non-unit input with no serialized body: send an empty CBOR map.
        // 15 — numeric schema sentinel; presumably "structure"/document — TODO confirm.
        this.serializer.write(15, {});
        request.body = this.serializer.flush();
      }
      try {
        request.headers["content-length"] = String(request.body.byteLength);
      } catch (e) {
        // best-effort: body may not expose byteLength (e.g. streams)
      }
    }
    const { service, operation } = (0, import_util_middleware.getSmithyContext)(context);
    const path = `/service/${service}/operation/${operation}`;
    // Avoid a double slash when the configured base path already ends with "/".
    if (request.path.endsWith("/")) {
      request.path += path.slice(1);
    } else {
      request.path += path;
    }
    return request;
  }
  async deserializeResponse(operationSchema, context, response) {
    return super.deserializeResponse(operationSchema, context, response);
  }
  // Resolves the modeled error type from the response, instantiates it from
  // the registered schema, and always throws (never returns normally).
  async handleError(operationSchema, context, response, dataObject, metadata) {
    const error = loadSmithyRpcV2CborErrorCode(response, dataObject) ?? "Unknown";
    let namespace = this.options.defaultNamespace;
    // Error codes may be fully qualified as "namespace#Shape".
    if (error.includes("#")) {
      [namespace] = error.split("#");
    }
    const registry = import_schema2.TypeRegistry.for(namespace);
    const errorSchema = registry.getSchema(error);
    if (!errorSchema) {
      throw new Error("schema not found for " + error);
    }
    const message = dataObject.message ?? dataObject.Message ?? "Unknown";
    const exception = new errorSchema.ctor(message);
    Object.assign(exception, {
      $metadata: metadata,
      $response: response,
      message,
      ...dataObject
    });
    throw exception;
  }
};
|
||||
// Annotate the CommonJS export names for ESM import in node:
// (`0 && ...` is dead code at runtime; node's CJS named-export detection
// reads the object literal statically to discover the export list.)
0 && (module.exports = {
  CborCodec,
  CborShapeDeserializer,
  CborShapeSerializer,
  SmithyRpcV2CborProtocol,
  buildHttpRpcRequest,
  cbor,
  checkCborResponse,
  dateToTag,
  loadSmithyRpcV2CborErrorCode,
  parseCborBody,
  parseCborErrorBody,
  tag,
  tagSymbol
});
|
||||
Generated
Vendored
+830
@@ -0,0 +1,830 @@
|
||||
// esbuild CommonJS-interop helper preamble (generated).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines each entry of `all` on `target` as an enumerable lazy getter.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and keys already on `to`; mirrors the source descriptor's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a module object in a fresh object tagged __esModule for interop.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
|
||||
// src/submodules/protocols/index.ts
|
||||
// Public surface of the protocols submodule, installed as lazy getters and
// re-exported through the CommonJS interop wrapper.
var protocols_exports = {};
__export(protocols_exports, {
  FromStringShapeDeserializer: () => FromStringShapeDeserializer,
  HttpBindingProtocol: () => HttpBindingProtocol,
  HttpInterceptingShapeDeserializer: () => HttpInterceptingShapeDeserializer,
  HttpInterceptingShapeSerializer: () => HttpInterceptingShapeSerializer,
  RequestBuilder: () => RequestBuilder,
  RpcProtocol: () => RpcProtocol,
  ToStringShapeSerializer: () => ToStringShapeSerializer,
  collectBody: () => collectBody,
  determineTimestampFormat: () => determineTimestampFormat,
  extendedEncodeURIComponent: () => extendedEncodeURIComponent,
  requestBuilder: () => requestBuilder,
  resolvedPath: () => resolvedPath
});
module.exports = __toCommonJS(protocols_exports);
|
||||
|
||||
// src/submodules/protocols/collect-stream-body.ts
|
||||
var import_util_stream = require("@smithy/util-stream");
|
||||
// Drains an HTTP response body into a Uint8Array blob adapter.
// Already-buffered bodies and nullish bodies short-circuit; streams are
// collected via the runtime-specific context.streamCollector.
var collectBody = async (streamBody = new Uint8Array(), context) => {
  if (streamBody instanceof Uint8Array) {
    return import_util_stream.Uint8ArrayBlobAdapter.mutate(streamBody);
  }
  if (!streamBody) {
    // e.g. an explicit null body (the default above only covers undefined)
    return import_util_stream.Uint8ArrayBlobAdapter.mutate(new Uint8Array());
  }
  const collected = await context.streamCollector(streamBody);
  return import_util_stream.Uint8ArrayBlobAdapter.mutate(collected);
};
|
||||
|
||||
// src/submodules/protocols/extended-encode-uri-component.ts
|
||||
// Percent-encodes like encodeURIComponent, additionally escaping the
// RFC 3986 sub-delims !, ', (, ), * that encodeURIComponent leaves intact.
function extendedEncodeURIComponent(str) {
  return encodeURIComponent(str).replace(
    /[!'()*]/g,
    (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`
  );
}
|
||||
|
||||
// src/submodules/protocols/HttpBindingProtocol.ts
|
||||
var import_schema2 = require("@smithy/core/schema");
|
||||
var import_protocol_http2 = require("@smithy/protocol-http");
|
||||
|
||||
// src/submodules/protocols/HttpProtocol.ts
|
||||
var import_schema = require("@smithy/core/schema");
|
||||
var import_serde = require("@smithy/core/serde");
|
||||
var import_protocol_http = require("@smithy/protocol-http");
|
||||
var import_util_stream2 = require("@smithy/util-stream");
|
||||
// Abstract base for HTTP-transported protocols: endpoint resolution, host
// prefixing, response metadata extraction, and HTTP-binding deserialization.
// Subclasses supply this.serializer / this.deserializer and getPayloadCodec().
var HttpProtocol = class {
  constructor(options) {
    this.options = options;
  }
  getRequestType() {
    return import_protocol_http.HttpRequest;
  }
  getResponseType() {
    return import_protocol_http.HttpResponse;
  }
  // Propagates the serde context to the (de)serializers and payload codec.
  setSerdeContext(serdeContext) {
    this.serdeContext = serdeContext;
    this.serializer.setSerdeContext(serdeContext);
    this.deserializer.setSerdeContext(serdeContext);
    if (this.getPayloadCodec()) {
      this.getPayloadCodec().setSerdeContext(serdeContext);
    }
  }
  // Writes the resolved endpoint onto the request. Handles both endpoint
  // shapes: { url: URL } (EndpointV2) and a flat endpoint object.
  updateServiceEndpoint(request, endpoint) {
    if ("url" in endpoint) {
      request.protocol = endpoint.url.protocol;
      request.hostname = endpoint.url.hostname;
      request.port = endpoint.url.port ? Number(endpoint.url.port) : void 0;
      request.path = endpoint.url.pathname;
      request.fragment = endpoint.url.hash || void 0;
      request.username = endpoint.url.username || void 0;
      request.password = endpoint.url.password || void 0;
      for (const [k, v] of endpoint.url.searchParams.entries()) {
        if (!request.query) {
          request.query = {};
        }
        request.query[k] = v;
      }
      return request;
    } else {
      request.protocol = endpoint.protocol;
      request.hostname = endpoint.hostname;
      request.port = endpoint.port ? Number(endpoint.port) : void 0;
      request.path = endpoint.path;
      request.query = {
        ...endpoint.query
      };
      return request;
    }
  }
  // Applies the @endpoint hostPrefix trait, substituting {name} placeholders
  // with @hostLabel members from the input. Throws if a label is not a string.
  setHostPrefix(request, operationSchema, input) {
    const operationNs = import_schema.NormalizedSchema.of(operationSchema);
    const inputNs = import_schema.NormalizedSchema.of(operationSchema.input);
    if (operationNs.getMergedTraits().endpoint) {
      let hostPrefix = operationNs.getMergedTraits().endpoint?.[0];
      if (typeof hostPrefix === "string") {
        const hostLabelInputs = [...inputNs.structIterator()].filter(
          ([, member]) => member.getMergedTraits().hostLabel
        );
        for (const [name] of hostLabelInputs) {
          const replacement = input[name];
          if (typeof replacement !== "string") {
            throw new Error(`@smithy/core/schema - ${name} in input must be a string as hostLabel.`);
          }
          hostPrefix = hostPrefix.replace(`{${name}}`, replacement);
        }
        request.hostname = hostPrefix + request.hostname;
      }
    }
  }
  // Extracts standard $metadata fields (status + AWS request-id headers).
  deserializeMetadata(output) {
    return {
      httpStatusCode: output.statusCode,
      requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"],
      extendedRequestId: output.headers["x-amz-id-2"],
      cfId: output.headers["x-amz-cf-id"]
    };
  }
  // Populates dataObject from the response's HTTP bindings (payload, headers,
  // prefix headers, response code) and returns the member names that have no
  // HTTP binding (i.e. must come from the body).
  // arg4/arg5: legacy overload — when arg4 is a Set (old signature), the data
  // object is in arg5; otherwise arg4 IS the data object.
  async deserializeHttpMessage(schema, context, response, arg4, arg5) {
    let dataObject;
    if (arg4 instanceof Set) {
      dataObject = arg5;
    } else {
      dataObject = arg4;
    }
    const deserializer = this.deserializer;
    const ns = import_schema.NormalizedSchema.of(schema);
    const nonHttpBindingMembers = [];
    for (const [memberName, memberSchema] of ns.structIterator()) {
      const memberTraits = memberSchema.getMemberTraits();
      if (memberTraits.httpPayload) {
        const isStreaming = memberSchema.isStreaming();
        if (isStreaming) {
          // A streaming struct member is an event stream union.
          const isEventStream = memberSchema.isStructSchema();
          if (isEventStream) {
            const context2 = this.serdeContext;
            if (!context2.eventStreamMarshaller) {
              throw new Error("@smithy/core - HttpProtocol: eventStreamMarshaller missing in serdeContext.");
            }
            const memberSchemas = memberSchema.getMemberSchemas();
            dataObject[memberName] = context2.eventStreamMarshaller.deserialize(response.body, async (event) => {
              // The event's single non-__type key names the union member.
              const unionMember = Object.keys(event).find((key) => {
                return key !== "__type";
              }) ?? "";
              if (unionMember in memberSchemas) {
                const eventStreamSchema = memberSchemas[unionMember];
                return {
                  [unionMember]: await deserializer.read(eventStreamSchema, event[unionMember].body)
                };
              } else {
                // Unrecognized event type: surface raw under $unknown.
                return {
                  $unknown: event
                };
              }
            });
          } else {
            // Plain streaming blob payload: hand the stream to the caller.
            dataObject[memberName] = (0, import_util_stream2.sdkStreamMixin)(response.body);
          }
        } else if (response.body) {
          const bytes = await collectBody(response.body, context);
          if (bytes.byteLength > 0) {
            dataObject[memberName] = await deserializer.read(memberSchema, bytes);
          }
        }
      } else if (memberTraits.httpHeader) {
        const key = String(memberTraits.httpHeader).toLowerCase();
        const value = response.headers[key];
        if (null != value) {
          if (memberSchema.isListSchema()) {
            const headerListValueSchema = memberSchema.getValueSchema();
            let sections;
            // Default-format timestamps (http-date) contain commas themselves,
            // so split on every second comma instead of every comma.
            if (headerListValueSchema.isTimestampSchema() && headerListValueSchema.getSchema() === import_schema.SCHEMA.TIMESTAMP_DEFAULT) {
              sections = (0, import_serde.splitEvery)(value, ",", 2);
            } else {
              sections = (0, import_serde.splitHeader)(value);
            }
            const list = [];
            for (const section of sections) {
              list.push(await deserializer.read([headerListValueSchema, { httpHeader: key }], section.trim()));
            }
            dataObject[memberName] = list;
          } else {
            dataObject[memberName] = await deserializer.read(memberSchema, value);
          }
        }
      } else if (memberTraits.httpPrefixHeaders !== void 0) {
        // Collect all headers sharing the prefix into a map keyed by suffix.
        dataObject[memberName] = {};
        for (const [header, value] of Object.entries(response.headers)) {
          if (header.startsWith(memberTraits.httpPrefixHeaders)) {
            dataObject[memberName][header.slice(memberTraits.httpPrefixHeaders.length)] = await deserializer.read(
              [memberSchema.getValueSchema(), { httpHeader: header }],
              value
            );
          }
        }
      } else if (memberTraits.httpResponseCode) {
        dataObject[memberName] = response.statusCode;
      } else {
        nonHttpBindingMembers.push(memberName);
      }
    }
    return nonHttpBindingMembers;
  }
};
|
||||
|
||||
// src/submodules/protocols/HttpBindingProtocol.ts
|
||||
// HTTP-binding protocol (e.g. restJson/restXml style): members map onto the
// HTTP message via traits (httpLabel/httpHeader/httpQuery/httpPayload/...),
// with the remaining members serialized into the body.
var HttpBindingProtocol = class extends HttpProtocol {
  // Builds the request from the @http trait and per-member HTTP bindings.
  async serializeRequest(operationSchema, input, context) {
    const serializer = this.serializer;
    const query = {};
    const headers = {};
    const endpoint = await context.endpoint();
    const ns = import_schema2.NormalizedSchema.of(operationSchema?.input);
    const schema = ns.getSchema();
    let hasNonHttpBindingMember = false;
    let payload;
    const request = new import_protocol_http2.HttpRequest({
      protocol: "",
      hostname: "",
      port: void 0,
      path: "",
      fragment: void 0,
      query,
      headers,
      body: void 0
    });
    if (endpoint) {
      this.updateServiceEndpoint(request, endpoint);
      this.setHostPrefix(request, operationSchema, input);
      const opTraits = import_schema2.NormalizedSchema.translateTraits(operationSchema.traits);
      if (opTraits.http) {
        // @http trait: [method, uri]; the uri may carry literal query params.
        request.method = opTraits.http[0];
        const [path, search] = opTraits.http[1].split("?");
        if (request.path == "/") {
          request.path = path;
        } else {
          request.path += path;
        }
        const traitSearchParams = new URLSearchParams(search ?? "");
        Object.assign(query, Object.fromEntries(traitSearchParams));
      }
    }
    // Work on a shallow copy; bound members are deleted so the leftover keys
    // are exactly the body members.
    const _input = {
      ...input
    };
    for (const memberName of Object.keys(_input)) {
      const memberNs = ns.getMemberSchema(memberName);
      if (memberNs === void 0) {
        // Ignore keys not present in the schema.
        continue;
      }
      const memberTraits = memberNs.getMergedTraits();
      const inputMember = _input[memberName];
      if (memberTraits.httpPayload) {
        const isStreaming = memberNs.isStreaming();
        if (isStreaming) {
          const isEventStream = memberNs.isStructSchema();
          if (isEventStream) {
            throw new Error("serialization of event streams is not yet implemented");
          } else {
            // Streaming blob: pass the stream through as the body.
            payload = inputMember;
          }
        } else {
          serializer.write(memberNs, inputMember);
          payload = serializer.flush();
        }
      } else if (memberTraits.httpLabel) {
        serializer.write(memberNs, inputMember);
        const replacement = serializer.flush();
        // Greedy labels ({name+}) keep path separators; plain labels encode them.
        if (request.path.includes(`{${memberName}+}`)) {
          request.path = request.path.replace(
            `{${memberName}+}`,
            replacement.split("/").map(extendedEncodeURIComponent).join("/")
          );
        } else if (request.path.includes(`{${memberName}}`)) {
          request.path = request.path.replace(`{${memberName}}`, extendedEncodeURIComponent(replacement));
        }
        delete _input[memberName];
      } else if (memberTraits.httpHeader) {
        serializer.write(memberNs, inputMember);
        headers[memberTraits.httpHeader.toLowerCase()] = String(serializer.flush());
        delete _input[memberName];
      } else if (typeof memberTraits.httpPrefixHeaders === "string") {
        // Map member: each entry becomes header "<prefix><key>".
        for (const [key, val] of Object.entries(inputMember)) {
          const amalgam = memberTraits.httpPrefixHeaders + key;
          serializer.write([memberNs.getValueSchema(), { httpHeader: amalgam }], val);
          headers[amalgam.toLowerCase()] = serializer.flush();
        }
        delete _input[memberName];
      } else if (memberTraits.httpQuery || memberTraits.httpQueryParams) {
        this.serializeQuery(memberNs, inputMember, query);
        delete _input[memberName];
      } else {
        hasNonHttpBindingMember = true;
      }
    }
    // Whatever remains unbound is serialized into the body as a structure.
    if (hasNonHttpBindingMember && input) {
      serializer.write(schema, _input);
      payload = serializer.flush();
    }
    request.headers = headers;
    request.query = query;
    request.body = payload;
    return request;
  }
  // Serializes one member into the query map, handling @httpQueryParams maps,
  // list values, and scalars. Trait-bound query keys take precedence over
  // httpQueryParams entries (existing keys are not overwritten).
  serializeQuery(ns, data, query) {
    const serializer = this.serializer;
    const traits = ns.getMergedTraits();
    if (traits.httpQueryParams) {
      for (const [key, val] of Object.entries(data)) {
        if (!(key in query)) {
          this.serializeQuery(
            import_schema2.NormalizedSchema.of([
              ns.getValueSchema(),
              {
                // We pass on the traits to the sub-schema
                // because we are still in the process of serializing the map itself.
                ...traits,
                httpQuery: key,
                httpQueryParams: void 0
              }
            ]),
            val,
            query
          );
        }
      }
      return;
    }
    if (ns.isListSchema()) {
      const sparse = !!ns.getMergedTraits().sparse;
      const buffer = [];
      for (const item of data) {
        serializer.write([ns.getValueSchema(), traits], item);
        const serializable = serializer.flush();
        // Non-sparse lists skip undefined entries.
        if (sparse || serializable !== void 0) {
          buffer.push(serializable);
        }
      }
      query[traits.httpQuery] = buffer;
    } else {
      serializer.write([ns, traits], data);
      query[traits.httpQuery] = serializer.flush();
    }
  }
  // Deserializes a response: error dispatch for >=300 statuses, then HTTP
  // bindings, then body members for anything left unbound.
  async deserializeResponse(operationSchema, context, response) {
    const deserializer = this.deserializer;
    const ns = import_schema2.NormalizedSchema.of(operationSchema.output);
    const dataObject = {};
    if (response.statusCode >= 300) {
      const bytes = await collectBody(response.body, context);
      if (bytes.byteLength > 0) {
        // Parse as a schemaless document so handleError can inspect it.
        Object.assign(dataObject, await deserializer.read(import_schema2.SCHEMA.DOCUMENT, bytes));
      }
      await this.handleError(operationSchema, context, response, dataObject, this.deserializeMetadata(response));
      throw new Error("@smithy/core/protocols - HTTP Protocol error handler failed to throw.");
    }
    // Normalize header names to lowercase in place.
    for (const header in response.headers) {
      const value = response.headers[header];
      delete response.headers[header];
      response.headers[header.toLowerCase()] = value;
    }
    const nonHttpBindingMembers = await this.deserializeHttpMessage(ns, context, response, dataObject);
    if (nonHttpBindingMembers.length) {
      const bytes = await collectBody(response.body, context);
      if (bytes.byteLength > 0) {
        const dataFromBody = await deserializer.read(ns, bytes);
        for (const member of nonHttpBindingMembers) {
          dataObject[member] = dataFromBody[member];
        }
      }
    }
    const output = {
      $metadata: this.deserializeMetadata(response),
      ...dataObject
    };
    return output;
  }
};
|
||||
|
||||
// src/submodules/protocols/RpcProtocol.ts
|
||||
var import_schema3 = require("@smithy/core/schema");
|
||||
var import_protocol_http3 = require("@smithy/protocol-http");
|
||||
// RPC-style protocol: the whole input is serialized as the body of a POST
// request; no per-member HTTP bindings.
var RpcProtocol = class extends HttpProtocol {
  async serializeRequest(operationSchema, input, context) {
    const serializer = this.serializer;
    const query = {};
    const headers = {};
    const endpoint = await context.endpoint();
    const ns = import_schema3.NormalizedSchema.of(operationSchema?.input);
    const schema = ns.getSchema();
    let payload;
    const request = new import_protocol_http3.HttpRequest({
      protocol: "",
      hostname: "",
      port: void 0,
      path: "/",
      fragment: void 0,
      query,
      headers,
      body: void 0
    });
    if (endpoint) {
      this.updateServiceEndpoint(request, endpoint);
      this.setHostPrefix(request, operationSchema, input);
    }
    // Shallow copy so serializers that mutate their input can't affect the caller.
    const _input = {
      ...input
    };
    if (input) {
      serializer.write(schema, _input);
      payload = serializer.flush();
    }
    request.headers = headers;
    request.query = query;
    request.body = payload;
    request.method = "POST";
    return request;
  }
  // Error dispatch for >=300 statuses, then deserializes the entire body
  // against the output schema.
  async deserializeResponse(operationSchema, context, response) {
    const deserializer = this.deserializer;
    const ns = import_schema3.NormalizedSchema.of(operationSchema.output);
    const dataObject = {};
    if (response.statusCode >= 300) {
      const bytes2 = await collectBody(response.body, context);
      if (bytes2.byteLength > 0) {
        // Parse as a schemaless document so handleError can inspect it.
        Object.assign(dataObject, await deserializer.read(import_schema3.SCHEMA.DOCUMENT, bytes2));
      }
      await this.handleError(operationSchema, context, response, dataObject, this.deserializeMetadata(response));
      throw new Error("@smithy/core/protocols - RPC Protocol error handler failed to throw.");
    }
    // Normalize header names to lowercase in place.
    for (const header in response.headers) {
      const value = response.headers[header];
      delete response.headers[header];
      response.headers[header.toLowerCase()] = value;
    }
    const bytes = await collectBody(response.body, context);
    if (bytes.byteLength > 0) {
      Object.assign(dataObject, await deserializer.read(ns, bytes));
    }
    const output = {
      $metadata: this.deserializeMetadata(response),
      ...dataObject
    };
    return output;
  }
};
|
||||
|
||||
// src/submodules/protocols/requestBuilder.ts
|
||||
var import_protocol_http4 = require("@smithy/protocol-http");
|
||||
|
||||
// src/submodules/protocols/resolve-path.ts
|
||||
// Substitutes one URI label in a request path with the encoded value taken
// from the input. Greedy labels keep "/" separators (segments encoded
// individually); plain labels are fully percent-encoded.
// Throws when the input member is absent or resolves to an empty string.
var resolvedPath = (resolvedPath2, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => {
  if (input == null || input[memberName] === void 0) {
    throw new Error("No value provided for input HTTP label: " + memberName + ".");
  }
  const labelValue = labelValueProvider();
  if (labelValue.length <= 0) {
    throw new Error("Empty value provided for input HTTP label: " + memberName + ".");
  }
  const encoded = isGreedyLabel
    ? labelValue.split("/").map((segment) => extendedEncodeURIComponent(segment)).join("/")
    : extendedEncodeURIComponent(labelValue);
  return resolvedPath2.replace(uriLabel, encoded);
};
|
||||
|
||||
// src/submodules/protocols/requestBuilder.ts
|
||||
// Convenience factory for the fluent RequestBuilder.
function requestBuilder(input, context) {
  return new RequestBuilder(input, context);
}
|
||||
// Fluent builder for HttpRequest used by generated serializers. Path
// resolution is deferred: bp()/p() push callbacks that run in build() once
// the endpoint's base path is known.
var RequestBuilder = class {
  constructor(input, context) {
    this.input = input;
    this.context = context;
    this.query = {};
    this.method = "";
    this.headers = {};
    this.path = "";
    this.body = null;
    this.hostname = "";
    // Deferred path-resolution steps, applied in order during build().
    this.resolvePathStack = [];
  }
  // Resolves the endpoint, applies the queued path steps, and assembles the
  // final HttpRequest. An explicit hn() hostname overrides the endpoint's.
  async build() {
    const { hostname, protocol = "https", port, path: basePath } = await this.context.endpoint();
    this.path = basePath;
    for (const resolvePath of this.resolvePathStack) {
      // Each callback reads the current path and writes this.path itself.
      resolvePath(this.path);
    }
    return new import_protocol_http4.HttpRequest({
      protocol,
      hostname: this.hostname || hostname,
      port,
      method: this.method,
      path: this.path,
      query: this.query,
      body: this.body,
      headers: this.headers
    });
  }
  /**
   * Brevity setter for "hostname".
   */
  hn(hostname) {
    this.hostname = hostname;
    return this;
  }
  /**
   * Brevity initial builder for "basepath".
   */
  bp(uriLabel) {
    this.resolvePathStack.push((basePath) => {
      // Join base path and uriLabel without producing a double slash.
      this.path = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + uriLabel;
    });
    return this;
  }
  /**
   * Brevity incremental builder for "path".
   */
  p(memberName, labelValueProvider, uriLabel, isGreedyLabel) {
    this.resolvePathStack.push((path) => {
      this.path = resolvedPath(path, this.input, memberName, labelValueProvider, uriLabel, isGreedyLabel);
    });
    return this;
  }
  /**
   * Brevity setter for "headers".
   */
  h(headers) {
    this.headers = headers;
    return this;
  }
  /**
   * Brevity setter for "query".
   */
  q(query) {
    this.query = query;
    return this;
  }
  /**
   * Brevity setter for "body".
   */
  b(body) {
    this.body = body;
    return this;
  }
  /**
   * Brevity setter for "method".
   */
  m(method) {
    this.method = method;
    return this;
  }
};
|
||||
|
||||
// src/submodules/protocols/serde/FromStringShapeDeserializer.ts
|
||||
var import_schema5 = require("@smithy/core/schema");
|
||||
var import_serde2 = require("@smithy/core/serde");
|
||||
var import_util_base64 = require("@smithy/util-base64");
|
||||
var import_util_utf8 = require("@smithy/util-utf8");
|
||||
|
||||
// src/submodules/protocols/serde/determineTimestampFormat.ts
|
||||
var import_schema4 = require("@smithy/core/schema");
|
||||
// Resolves the timestamp wire format for a schema member.
// Precedence: explicit @timestampFormat trait (when enabled) >
// HTTP-binding default (headers use http-date, query/label use date-time) >
// the protocol's configured default.
function determineTimestampFormat(ns, settings) {
  if (settings.timestampFormat.useTrait) {
    const explicit = ns.getSchema();
    const isTraitFormat =
      explicit === import_schema4.SCHEMA.TIMESTAMP_DATE_TIME ||
      explicit === import_schema4.SCHEMA.TIMESTAMP_HTTP_DATE ||
      explicit === import_schema4.SCHEMA.TIMESTAMP_EPOCH_SECONDS;
    if (ns.isTimestampSchema() && isTraitFormat) {
      return explicit;
    }
  }
  const { httpLabel, httpPrefixHeaders, httpHeader, httpQuery } = ns.getMergedTraits();
  let bindingFormat;
  if (settings.httpBindings) {
    if (typeof httpPrefixHeaders === "string" || Boolean(httpHeader)) {
      bindingFormat = import_schema4.SCHEMA.TIMESTAMP_HTTP_DATE;
    } else if (Boolean(httpQuery) || Boolean(httpLabel)) {
      bindingFormat = import_schema4.SCHEMA.TIMESTAMP_DATE_TIME;
    }
  }
  return bindingFormat ?? settings.timestampFormat.default;
}
|
||||
|
||||
// src/submodules/protocols/serde/FromStringShapeDeserializer.ts
|
||||
// Deserializes string-carried values (headers, query, labels) into their
// schema-typed runtime form: lists, blobs (base64), timestamps, media-typed
// strings, numbers, bigints, big decimals, and booleans.
var FromStringShapeDeserializer = class {
  constructor(settings) {
    this.settings = settings;
  }
  setSerdeContext(serdeContext) {
    this.serdeContext = serdeContext;
  }
  read(_schema, data) {
    const ns = import_schema5.NormalizedSchema.of(_schema);
    if (ns.isListSchema()) {
      // Comma-delimited header list; each element is read recursively.
      return (0, import_serde2.splitHeader)(data).map((item) => this.read(ns.getValueSchema(), item));
    }
    if (ns.isBlobSchema()) {
      // String-carried blobs are base64; prefer the context's decoder.
      return (this.serdeContext?.base64Decoder ?? import_util_base64.fromBase64)(data);
    }
    if (ns.isTimestampSchema()) {
      const format = determineTimestampFormat(ns, this.settings);
      switch (format) {
        case import_schema5.SCHEMA.TIMESTAMP_DATE_TIME:
          return (0, import_serde2.parseRfc3339DateTimeWithOffset)(data);
        case import_schema5.SCHEMA.TIMESTAMP_HTTP_DATE:
          return (0, import_serde2.parseRfc7231DateTime)(data);
        case import_schema5.SCHEMA.TIMESTAMP_EPOCH_SECONDS:
          return (0, import_serde2.parseEpochTimestamp)(data);
        default:
          // Unknown format: best-effort native parse (logged for visibility).
          console.warn("Missing timestamp format, parsing value with Date constructor:", data);
          return new Date(data);
      }
    }
    if (ns.isStringSchema()) {
      const mediaType = ns.getMergedTraits().mediaType;
      let intermediateValue = data;
      if (mediaType) {
        // Media-typed strings in headers are base64-encoded on the wire.
        if (ns.getMergedTraits().httpHeader) {
          intermediateValue = this.base64ToUtf8(intermediateValue);
        }
        const isJson = mediaType === "application/json" || mediaType.endsWith("+json");
        if (isJson) {
          intermediateValue = import_serde2.LazyJsonString.from(intermediateValue);
        }
        return intermediateValue;
      }
    }
    switch (true) {
      case ns.isNumericSchema():
        return Number(data);
      case ns.isBigIntegerSchema():
        return BigInt(data);
      case ns.isBigDecimalSchema():
        return new import_serde2.NumericValue(data, "bigDecimal");
      case ns.isBooleanSchema():
        return String(data).toLowerCase() === "true";
    }
    // Plain string (or anything unhandled) is returned as-is.
    return data;
  }
  // Decodes a base64 string to UTF-8 text using the context codecs if present.
  base64ToUtf8(base64String) {
    return (this.serdeContext?.utf8Encoder ?? import_util_utf8.toUtf8)((this.serdeContext?.base64Decoder ?? import_util_base64.fromBase64)(base64String));
  }
};
|
||||
|
||||
// src/submodules/protocols/serde/HttpInterceptingShapeDeserializer.ts
|
||||
var import_schema6 = require("@smithy/core/schema");
|
||||
var import_util_utf82 = require("@smithy/util-utf8");
|
||||
// Wraps a codec deserializer, intercepting HTTP-bound values: header and
// response-code values go through string deserialization, and payload
// blob/string schemas get raw byte/string coercion; everything else is
// delegated to the underlying codec.
var HttpInterceptingShapeDeserializer = class {
  constructor(codecDeserializer, codecSettings) {
    this.codecDeserializer = codecDeserializer;
    this.stringDeserializer = new FromStringShapeDeserializer(codecSettings);
  }
  // Propagates the serde context to both delegates.
  setSerdeContext(serdeContext) {
    this.stringDeserializer.setSerdeContext(serdeContext);
    this.codecDeserializer.setSerdeContext(serdeContext);
    this.serdeContext = serdeContext;
  }
  read(schema, data) {
    const ns = import_schema6.NormalizedSchema.of(schema);
    const traits = ns.getMergedTraits();
    const toString = this.serdeContext?.utf8Encoder ?? import_util_utf82.toUtf8;
    if (traits.httpHeader || traits.httpResponseCode) {
      // Header/status values are strings on the wire.
      return this.stringDeserializer.read(ns, toString(data));
    }
    if (traits.httpPayload) {
      if (ns.isBlobSchema()) {
        const toBytes = this.serdeContext?.utf8Decoder ?? import_util_utf82.fromUtf8;
        // Coerce a string payload to bytes; byte payloads pass through.
        if (typeof data === "string") {
          return toBytes(data);
        }
        return data;
      } else if (ns.isStringSchema()) {
        // Coerce a byte payload (anything with byteLength) to a string.
        if ("byteLength" in data) {
          return toString(data);
        }
        return data;
      }
    }
    return this.codecDeserializer.read(ns, data);
  }
};
|
||||
|
||||
// src/submodules/protocols/serde/HttpInterceptingShapeSerializer.ts
|
||||
var import_schema8 = require("@smithy/core/schema");
|
||||
|
||||
// src/submodules/protocols/serde/ToStringShapeSerializer.ts
|
||||
var import_schema7 = require("@smithy/core/schema");
|
||||
var import_serde3 = require("@smithy/core/serde");
|
||||
var import_util_base642 = require("@smithy/util-base64");
|
||||
// Serializes individual shape values to their string representation, as used
// for HTTP string-bound locations (headers, labels, query parameters).
var ToStringShapeSerializer = class {
  constructor(settings) {
    this.settings = settings;
    // Holds the most recently written string until flush() retrieves it.
    this.stringBuffer = "";
    this.serdeContext = void 0;
  }
  setSerdeContext(serdeContext) {
    this.serdeContext = serdeContext;
  }
  // Writes the string form of `value` per its schema into the internal buffer.
  write(schema, value) {
    const ns = import_schema7.NormalizedSchema.of(schema);
    switch (typeof value) {
      case "object":
        if (value === null) {
          this.stringBuffer = "null";
          return;
        }
        if (ns.isTimestampSchema()) {
          if (!(value instanceof Date)) {
            throw new Error(
              `@smithy/core/protocols - received non-Date value ${value} when schema expected Date in ${ns.getName(true)}`
            );
          }
          const format = determineTimestampFormat(ns, this.settings);
          switch (format) {
            case import_schema7.SCHEMA.TIMESTAMP_DATE_TIME:
              // drop the milliseconds segment when it is zero
              this.stringBuffer = value.toISOString().replace(".000Z", "Z");
              break;
            case import_schema7.SCHEMA.TIMESTAMP_HTTP_DATE:
              this.stringBuffer = (0, import_serde3.dateToUtcString)(value);
              break;
            case import_schema7.SCHEMA.TIMESTAMP_EPOCH_SECONDS:
              this.stringBuffer = String(value.getTime() / 1e3);
              break;
            default:
              // fall back to epoch seconds when no format could be determined
              console.warn("Missing timestamp format, using epoch seconds", value);
              this.stringBuffer = String(value.getTime() / 1e3);
          }
          return;
        }
        if (ns.isBlobSchema() && "byteLength" in value) {
          // byte-array blobs are base64-encoded for string transport
          this.stringBuffer = (this.serdeContext?.base64Encoder ?? import_util_base642.toBase64)(value);
          return;
        }
        if (ns.isListSchema() && Array.isArray(value)) {
          // serialize each item recursively (reusing this serializer's buffer),
          // quote non-timestamp items, and join with ", " (header list syntax)
          let buffer = "";
          for (const item of value) {
            this.write([ns.getValueSchema(), ns.getMergedTraits()], item);
            const headerItem = this.flush();
            const serialized = ns.getValueSchema().isTimestampSchema() ? headerItem : (0, import_serde3.quoteHeader)(headerItem);
            if (buffer !== "") {
              buffer += ", ";
            }
            buffer += serialized;
          }
          this.stringBuffer = buffer;
          return;
        }
        // last resort for other objects: pretty-printed JSON
        this.stringBuffer = JSON.stringify(value, null, 2);
        break;
      case "string":
        const mediaType = ns.getMergedTraits().mediaType;
        let intermediateValue = value;
        if (mediaType) {
          const isJson = mediaType === "application/json" || mediaType.endsWith("+json");
          if (isJson) {
            intermediateValue = import_serde2.LazyJsonString.from(intermediateValue);
          }
          // media-typed values bound to headers are transported base64-encoded
          if (ns.getMergedTraits().httpHeader) {
            this.stringBuffer = (this.serdeContext?.base64Encoder ?? import_util_base642.toBase64)(intermediateValue.toString());
            return;
          }
        }
        this.stringBuffer = value;
        break;
      default:
        // numbers, booleans, bigints, undefined: default string conversion
        this.stringBuffer = String(value);
    }
  }
  // Returns the buffered string and resets the buffer to empty.
  flush() {
    const buffer = this.stringBuffer;
    this.stringBuffer = "";
    return buffer;
  }
};
|
||||
|
||||
// src/submodules/protocols/serde/HttpInterceptingShapeSerializer.ts
|
||||
// Routes serialization between a string serializer (for values bound to HTTP
// headers, labels, and query parameters) and the wrapped codec serializer.
var HttpInterceptingShapeSerializer = class {
  constructor(codecSerializer, codecSettings, stringSerializer = new ToStringShapeSerializer(codecSettings)) {
    this.codecSerializer = codecSerializer;
    this.stringSerializer = stringSerializer;
  }
  setSerdeContext(serdeContext) {
    this.codecSerializer.setSerdeContext(serdeContext);
    this.stringSerializer.setSerdeContext(serdeContext);
  }
  write(schema, value) {
    const normalized = import_schema8.NormalizedSchema.of(schema);
    const { httpHeader, httpLabel, httpQuery } = normalized.getMergedTraits();
    if (!(httpHeader || httpLabel || httpQuery)) {
      return this.codecSerializer.write(normalized, value);
    }
    // String-bound location: serialize eagerly and hold the result for flush().
    this.stringSerializer.write(normalized, value);
    this.buffer = this.stringSerializer.flush();
  }
  flush() {
    const buffered = this.buffer;
    if (buffered === void 0) {
      return this.codecSerializer.flush();
    }
    this.buffer = void 0;
    return buffered;
  }
};
|
||||
// Annotate the CommonJS export names for ESM import in node:
// (dead code by construction — the `0 &&` guard means it never executes; it
// exists only so static analysis of the CJS module can discover the named exports)
0 && (module.exports = {
  FromStringShapeDeserializer,
  HttpBindingProtocol,
  HttpInterceptingShapeDeserializer,
  HttpInterceptingShapeSerializer,
  RequestBuilder,
  RpcProtocol,
  ToStringShapeSerializer,
  collectBody,
  determineTimestampFormat,
  extendedEncodeURIComponent,
  requestBuilder,
  resolvedPath
});
|
||||
Generated
Vendored
+774
@@ -0,0 +1,774 @@
|
||||
// Bundler-generated CommonJS interop helpers.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines every entry of `all` on `target` as an enumerable getter.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as forwarding getters, skipping
// `except` and keys already present on `to`; preserves each source property's
// enumerability (the `desc` parameter is deliberately reused as a local).
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Builds the exports object: a fresh object marked __esModule with all of
// `mod`'s properties re-exported through getters.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
|
||||
// src/submodules/schema/index.ts
|
||||
var schema_exports = {};
|
||||
__export(schema_exports, {
|
||||
ErrorSchema: () => ErrorSchema,
|
||||
ListSchema: () => ListSchema,
|
||||
MapSchema: () => MapSchema,
|
||||
NormalizedSchema: () => NormalizedSchema,
|
||||
OperationSchema: () => OperationSchema,
|
||||
SCHEMA: () => SCHEMA,
|
||||
Schema: () => Schema,
|
||||
SimpleSchema: () => SimpleSchema,
|
||||
StructureSchema: () => StructureSchema,
|
||||
TypeRegistry: () => TypeRegistry,
|
||||
deref: () => deref,
|
||||
deserializerMiddlewareOption: () => deserializerMiddlewareOption,
|
||||
error: () => error,
|
||||
getSchemaSerdePlugin: () => getSchemaSerdePlugin,
|
||||
list: () => list,
|
||||
map: () => map,
|
||||
op: () => op,
|
||||
serializerMiddlewareOption: () => serializerMiddlewareOption,
|
||||
sim: () => sim,
|
||||
struct: () => struct
|
||||
});
|
||||
module.exports = __toCommonJS(schema_exports);
|
||||
|
||||
// src/submodules/schema/deref.ts
|
||||
/**
 * Dereferences a SchemaRef: schema references may be wrapped in a
 * zero-argument thunk (allowing late/circular references); unwrap when so.
 *
 * @param schemaRef - a schema, or a function returning one.
 * @returns the schema itself.
 */
var deref = (schemaRef) => typeof schemaRef === "function" ? schemaRef() : schemaRef;
|
||||
|
||||
// src/submodules/schema/middleware/schemaDeserializationMiddleware.ts
|
||||
var import_protocol_http = require("@smithy/protocol-http");
|
||||
var import_util_middleware = require("@smithy/util-middleware");
|
||||
/**
 * Middleware that deserializes the HTTP response into operation output using
 * the configured protocol and the operation schema found in the smithy context.
 * On failure, it enriches the thrown error with diagnostics ($response,
 * $metadata) before rethrowing.
 */
var schemaDeserializationMiddleware = (config) => (next, context) => async (args) => {
  const { response } = await next(args);
  const { operationSchema } = (0, import_util_middleware.getSmithyContext)(context);
  try {
    const parsed = await config.protocol.deserializeResponse(
      operationSchema,
      {
        ...config,
        ...context
      },
      response
    );
    return {
      response,
      output: parsed
    };
  } catch (error2) {
    // expose the raw response on the error as a hidden (non-enumerable) field
    Object.defineProperty(error2, "$response", {
      value: response
    });
    if (!("$metadata" in error2)) {
      // the error is not an already-enriched modeled service error:
      // add a hint and best-effort response metadata
      const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`;
      try {
        error2.message += "\n " + hint;
      } catch (e) {
        // message may not be writable; fall back to logging the hint
        if (!context.logger || context.logger?.constructor?.name === "NoOpLogger") {
          console.warn(hint);
        } else {
          context.logger?.warn?.(hint);
        }
      }
      if (typeof error2.$responseBodyText !== "undefined") {
        // restore the already-consumed body text onto the exposed response
        if (error2.$response) {
          error2.$response.body = error2.$responseBodyText;
        }
      }
      try {
        if (import_protocol_http.HttpResponse.isInstance(response)) {
          const { headers = {} } = response;
          const headerEntries = Object.entries(headers);
          // request-id style headers are matched by pattern since their exact
          // names vary by service
          error2.$metadata = {
            httpStatusCode: response.statusCode,
            requestId: findHeader(/^x-[\w-]+-request-?id$/, headerEntries),
            extendedRequestId: findHeader(/^x-[\w-]+-id-2$/, headerEntries),
            cfId: findHeader(/^x-[\w-]+-cf-id$/, headerEntries)
          };
        }
      } catch (e) {
        // metadata extraction is best-effort; ignore failures
      }
    }
    throw error2;
  }
};
|
||||
/**
 * Finds the value of the first header whose name matches the pattern.
 *
 * @param pattern - regex tested against each header name.
 * @param headers - header entries as [name, value] pairs.
 * @returns the first matching header's value, or undefined when none matches.
 */
var findHeader = (pattern, headers) => {
  for (const [name, value] of headers) {
    if (name.match(pattern)) {
      return value;
    }
  }
  return void 0;
};
|
||||
|
||||
// src/submodules/schema/middleware/schemaSerializationMiddleware.ts
|
||||
var import_util_middleware2 = require("@smithy/util-middleware");
|
||||
/**
 * Middleware that serializes the operation input into a transport request
 * using the configured protocol and the operation schema from the smithy
 * context.
 */
var schemaSerializationMiddleware = (config) => (next, context) => async (args) => {
  const { operationSchema } = (0, import_util_middleware2.getSmithyContext)(context);
  // Prefer the resolved endpointV2 URL (lazily parsed) over the static endpoint.
  let endpoint = config.endpoint;
  if (context.endpointV2?.url && config.urlParser) {
    endpoint = async () => config.urlParser(context.endpointV2.url);
  }
  const serdeContext = { ...config, ...context, endpoint };
  const request = await config.protocol.serializeRequest(operationSchema, args.input, serdeContext);
  return next({ ...args, request });
};
|
||||
|
||||
// src/submodules/schema/middleware/getSchemaSerdePlugin.ts
|
||||
// Middleware-stack placement for the schema-based response deserializer.
var deserializerMiddlewareOption = {
  name: "deserializerMiddleware",
  step: "deserialize",
  tags: ["DESERIALIZER"],
  override: true
};
// Middleware-stack placement for the schema-based request serializer.
var serializerMiddlewareOption = {
  name: "serializerMiddleware",
  step: "serialize",
  tags: ["SERIALIZER"],
  override: true
};
|
||||
/**
 * Creates a pluggable that installs the schema-driven serialization and
 * deserialization middleware on a command's stack, and wires the serde
 * context into the protocol.
 */
function getSchemaSerdePlugin(config) {
  const applyToStack = (commandStack) => {
    commandStack.add(schemaSerializationMiddleware(config), serializerMiddlewareOption);
    commandStack.add(schemaDeserializationMiddleware(config), deserializerMiddlewareOption);
    // the protocol needs the serde context (encoders/decoders etc.) up front
    config.protocol.setSerdeContext(config);
  };
  return { applyToStack };
}
|
||||
|
||||
// src/submodules/schema/TypeRegistry.ts
|
||||
/**
 * A registry of schemas keyed by qualified shape id ("namespace#Name"),
 * partitioned into one registry instance per namespace.
 */
var TypeRegistry = class _TypeRegistry {
  // All registry instances, keyed by namespace.
  static registries = new Map();
  constructor(namespace, schemas = new Map()) {
    this.namespace = namespace;
    this.schemas = schemas;
  }
  /**
   * @param namespace - specifier.
   * @returns the registry for that namespace, creating it if necessary.
   */
  static for(namespace) {
    let registry = _TypeRegistry.registries.get(namespace);
    if (registry === undefined) {
      registry = new _TypeRegistry(namespace);
      _TypeRegistry.registries.set(namespace, registry);
    }
    return registry;
  }
  /**
   * Adds the given schema to the type registry with the same namespace.
   *
   * @param shapeId - to be registered.
   * @param schema - to be registered.
   */
  register(shapeId, schema) {
    const qualifiedName = this.normalizeShapeId(shapeId);
    // registration always lands in the registry owning the shape's namespace,
    // which may differ from `this` when shapeId is fully qualified
    _TypeRegistry.for(this.getNamespace(shapeId)).schemas.set(qualifiedName, schema);
  }
  /**
   * @param shapeId - query.
   * @returns the schema.
   * @throws Error when no schema is registered under the (normalized) id.
   */
  getSchema(shapeId) {
    const id = this.normalizeShapeId(shapeId);
    const schema = this.schemas.get(id);
    if (schema === undefined && !this.schemas.has(id)) {
      throw new Error(`@smithy/core/schema - schema not found for ${id}`);
    }
    return schema;
  }
  /**
   * The smithy-typescript code generator generates a synthetic (i.e. unmodeled) base exception,
   * because generated SDKs before the introduction of schemas have the notion of a ServiceBaseException, which
   * is unique per service/model.
   *
   * This is generated under a unique prefix that is combined with the service namespace, and this
   * method is used to retrieve it.
   *
   * The base exception synthetic schema is used when an error is returned by a service, but we cannot
   * determine what existing schema to use to deserialize it.
   *
   * @returns the synthetic base exception of the service namespace associated with this registry instance.
   */
  getBaseException() {
    const entry = [...this.schemas.entries()].find(
      ([id]) => id.startsWith("smithy.ts.sdk.synthetic.") && id.endsWith("ServiceException")
    );
    return entry?.[1];
  }
  /**
   * @param predicate - criterion.
   * @returns a schema in this registry matching the predicate.
   */
  find(predicate) {
    return Array.from(this.schemas.values()).find(predicate);
  }
  /**
   * Unloads the current TypeRegistry.
   */
  destroy() {
    _TypeRegistry.registries.delete(this.namespace);
    this.schemas.clear();
  }
  // Qualifies a bare shape name with this registry's namespace.
  normalizeShapeId(shapeId) {
    return shapeId.includes("#") ? shapeId : `${this.namespace}#${shapeId}`;
  }
  // Extracts the namespace portion of a (possibly bare) shape id.
  getNamespace(shapeId) {
    return this.normalizeShapeId(shapeId).split("#")[0];
  }
};
|
||||
|
||||
// src/submodules/schema/schemas/Schema.ts
|
||||
/**
 * Base class for schema node types: carries the qualified shape name and its
 * trait bag (an object, or a numeric trait indicator).
 */
var Schema = class {
  constructor(name, traits) {
    Object.assign(this, { name, traits });
  }
};
|
||||
|
||||
// src/submodules/schema/schemas/ListSchema.ts
|
||||
/**
 * Schema node for list shapes, carrying the schema of the list member.
 */
var ListSchema = class extends Schema {
  constructor(name, traits, valueSchema) {
    super(name, traits);
    // redundant re-assignment of name/traits mirrors the base class
    this.name = name;
    this.traits = traits;
    this.valueSchema = valueSchema;
  }
};
/**
 * Factory for a ListSchema; registers it in the namespace's TypeRegistry.
 * @param valueSchema - the member schema, or a thunk returning it.
 */
function list(namespace, name, traits = {}, valueSchema) {
  const resolvedValueSchema = typeof valueSchema === "function" ? valueSchema() : valueSchema;
  const schema = new ListSchema(`${namespace}#${name}`, traits, resolvedValueSchema);
  TypeRegistry.for(namespace).register(name, schema);
  return schema;
}
|
||||
|
||||
// src/submodules/schema/schemas/MapSchema.ts
|
||||
/**
 * Schema node for map shapes, carrying the key and value schemas.
 */
var MapSchema = class extends Schema {
  constructor(name, traits, keySchema, valueSchema) {
    super(name, traits);
    // redundant re-assignment of name/traits mirrors the base class
    this.name = name;
    this.traits = traits;
    this.keySchema = keySchema;
    this.valueSchema = valueSchema;
  }
};
/**
 * Factory for a MapSchema; registers it in the namespace's TypeRegistry.
 * @param valueSchema - the value schema, or a thunk returning it.
 */
function map(namespace, name, traits = {}, keySchema, valueSchema) {
  const resolvedValueSchema = typeof valueSchema === "function" ? valueSchema() : valueSchema;
  const schema = new MapSchema(`${namespace}#${name}`, traits, keySchema, resolvedValueSchema);
  TypeRegistry.for(namespace).register(name, schema);
  return schema;
}
|
||||
|
||||
// src/submodules/schema/schemas/OperationSchema.ts
|
||||
/**
 * Schema node for operations, pairing input and output schema refs.
 */
var OperationSchema = class extends Schema {
  constructor(name, traits, input, output) {
    super(name, traits);
    // redundant re-assignment of name/traits mirrors the base class
    this.name = name;
    this.traits = traits;
    this.input = input;
    this.output = output;
  }
};
/**
 * Factory for an OperationSchema; registers it in the namespace's TypeRegistry.
 */
function op(namespace, name, traits = {}, input, output) {
  const schema = new OperationSchema(`${namespace}#${name}`, traits, input, output);
  TypeRegistry.for(namespace).register(name, schema);
  return schema;
}
|
||||
|
||||
// src/submodules/schema/schemas/StructureSchema.ts
|
||||
/**
 * Schema node for structures and unions: member names paired positionally
 * with member schema refs, plus a `members` lookup keyed by member name.
 */
var StructureSchema = class extends Schema {
  constructor(name, traits, memberNames, memberList) {
    super(name, traits);
    // redundant re-assignment of name/traits mirrors the base class
    this.name = name;
    this.traits = traits;
    this.memberNames = memberNames;
    this.memberList = memberList;
    this.members = {};
    // each member entry is normalized to a [schemaRef, traitIndicator] pair
    memberNames.forEach((memberName, i) => {
      const entry = memberList[i];
      this.members[memberName] = Array.isArray(entry) ? entry : [entry, 0];
    });
  }
};
/**
 * Factory for a StructureSchema; registers it in the namespace's TypeRegistry.
 */
function struct(namespace, name, traits, memberNames, memberList) {
  const schema = new StructureSchema(`${namespace}#${name}`, traits, memberNames, memberList);
  TypeRegistry.for(namespace).register(name, schema);
  return schema;
}
|
||||
|
||||
// src/submodules/schema/schemas/ErrorSchema.ts
|
||||
/**
 * Structure schema variant for error shapes; additionally carries the
 * constructor used to instantiate the deserialized error.
 */
var ErrorSchema = class extends StructureSchema {
  constructor(name, traits, memberNames, memberList, ctor) {
    super(name, traits, memberNames, memberList);
    // redundant re-assignment mirrors the base class
    this.name = name;
    this.traits = traits;
    this.memberNames = memberNames;
    this.memberList = memberList;
    this.ctor = ctor;
  }
};
/**
 * Factory for an ErrorSchema; registers it in the namespace's TypeRegistry.
 */
function error(namespace, name, traits = {}, memberNames, memberList, ctor) {
  const schema = new ErrorSchema(`${namespace}#${name}`, traits, memberNames, memberList, ctor);
  TypeRegistry.for(namespace).register(name, schema);
  return schema;
}
|
||||
|
||||
// src/submodules/schema/schemas/sentinels.ts
// Numeric sentinel values that stand in for full schema objects of simple
// types. Scalar sentinels occupy the low 6 bits; LIST_MODIFIER/MAP_MODIFIER
// occupy the high bits and are combined with a scalar sentinel to denote a
// list or map of that scalar (see NormalizedSchema's `63 & schema` /
// `192 & schema` masking).
var SCHEMA = {
  BLOB: 21,
  STREAMING_BLOB: 42,
  BOOLEAN: 2,
  STRING: 0,
  NUMERIC: 1,
  BIG_INTEGER: 17,
  BIG_DECIMAL: 19,
  DOCUMENT: 15,
  // timestamp sentinels form a contiguous range [DEFAULT, EPOCH_SECONDS]
  TIMESTAMP_DEFAULT: 4,
  TIMESTAMP_DATE_TIME: 5,
  TIMESTAMP_HTTP_DATE: 6,
  TIMESTAMP_EPOCH_SECONDS: 7,
  LIST_MODIFIER: 64,
  MAP_MODIFIER: 128
};
|
||||
|
||||
// src/submodules/schema/schemas/SimpleSchema.ts
|
||||
/**
 * Wrapper schema pairing a simple type's schema ref with a trait bag.
 */
var SimpleSchema = class extends Schema {
  constructor(name, schemaRef, traits) {
    super(name, traits);
    // redundant re-assignment of name/traits mirrors the base class
    this.name = name;
    this.schemaRef = schemaRef;
    this.traits = traits;
  }
};
/**
 * Factory for a SimpleSchema; registers it in the namespace's TypeRegistry.
 */
function sim(namespace, name, schemaRef, traits) {
  const schema = new SimpleSchema(`${namespace}#${name}`, schemaRef, traits);
  TypeRegistry.for(namespace).register(name, schema);
  return schema;
}
|
||||
|
||||
// src/submodules/schema/schemas/NormalizedSchema.ts
|
||||
var NormalizedSchema = class _NormalizedSchema {
  /**
   * @param ref - a polymorphic SchemaRef to be dereferenced/normalized.
   * @param memberName - optional memberName if this NormalizedSchema should be considered a member schema.
   */
  constructor(ref, memberName) {
    this.ref = ref;
    this.memberName = memberName;
    const traitStack = [];
    let _ref = ref;
    let schema = ref;
    this._isMemberSchema = false;
    // A member ref is a [targetSchemaRef, traits] tuple, possibly nested;
    // unwrap all layers, collecting each layer's traits.
    while (Array.isArray(_ref)) {
      traitStack.push(_ref[1]);
      _ref = _ref[0];
      schema = deref(_ref);
      this._isMemberSchema = true;
    }
    if (traitStack.length > 0) {
      this.memberTraits = {};
      // merge from innermost to outermost so the outermost traits win
      for (let i = traitStack.length - 1; i >= 0; --i) {
        const traitSet = traitStack[i];
        Object.assign(this.memberTraits, _NormalizedSchema.translateTraits(traitSet));
      }
    } else {
      this.memberTraits = 0;
    }
    if (schema instanceof _NormalizedSchema) {
      // collapse a nested NormalizedSchema, merging member traits
      this.name = schema.name;
      this.traits = schema.traits;
      this._isMemberSchema = schema._isMemberSchema;
      this.schema = schema.schema;
      this.memberTraits = Object.assign({}, schema.getMemberTraits(), this.getMemberTraits());
      this.normalizedTraits = void 0;
      this.ref = schema.ref;
      this.memberName = memberName ?? schema.memberName;
      return;
    }
    this.schema = deref(schema);
    if (this.schema && typeof this.schema === "object") {
      this.traits = this.schema?.traits ?? {};
    } else {
      this.traits = 0;
    }
    this.name = (typeof this.schema === "object" ? this.schema?.name : void 0) ?? this.memberName ?? this.getSchemaName();
    if (this._isMemberSchema && !memberName) {
      throw new Error(
        `@smithy/core/schema - NormalizedSchema member schema ${this.getName(true)} must initialize with memberName argument.`
      );
    }
  }
  /**
   * Static constructor that attempts to avoid wrapping a NormalizedSchema within another.
   */
  static of(ref, memberName) {
    if (ref instanceof _NormalizedSchema) {
      return ref;
    }
    return new _NormalizedSchema(ref, memberName);
  }
  /**
   * @param indicator - numeric indicator for preset trait combination.
   * @returns equivalent trait object.
   */
  static translateTraits(indicator) {
    if (typeof indicator === "object") {
      return indicator;
    }
    indicator = indicator | 0;
    // bitfield layout: 1 httpLabel, 2 idempotent, 4 idempotencyToken,
    // 8 sensitive, 16 httpPayload, 32 httpResponseCode, 64 httpQueryParams
    const traits = {};
    if ((indicator & 1) === 1) {
      traits.httpLabel = 1;
    }
    if ((indicator >> 1 & 1) === 1) {
      traits.idempotent = 1;
    }
    if ((indicator >> 2 & 1) === 1) {
      traits.idempotencyToken = 1;
    }
    if ((indicator >> 3 & 1) === 1) {
      traits.sensitive = 1;
    }
    if ((indicator >> 4 & 1) === 1) {
      traits.httpPayload = 1;
    }
    if ((indicator >> 5 & 1) === 1) {
      traits.httpResponseCode = 1;
    }
    if ((indicator >> 6 & 1) === 1) {
      traits.httpQueryParams = 1;
    }
    return traits;
  }
  /**
   * Creates a normalized member schema from the given schema and member name.
   */
  static memberFrom(memberSchema, memberName) {
    if (memberSchema instanceof _NormalizedSchema) {
      memberSchema.memberName = memberName;
      memberSchema._isMemberSchema = true;
      return memberSchema;
    }
    return new _NormalizedSchema(memberSchema, memberName);
  }
  /**
   * @returns the underlying non-normalized schema.
   */
  getSchema() {
    if (this.schema instanceof _NormalizedSchema) {
      // cache the unwrapped schema for subsequent calls
      return this.schema = this.schema.getSchema();
    }
    if (this.schema instanceof SimpleSchema) {
      return deref(this.schema.schemaRef);
    }
    return deref(this.schema);
  }
  /**
   * @param withNamespace - qualifies the name.
   * @returns e.g. `MyShape` or `com.namespace#MyShape`.
   */
  getName(withNamespace = false) {
    if (!withNamespace) {
      if (this.name && this.name.includes("#")) {
        return this.name.split("#")[1];
      }
    }
    // empty string is normalized to undefined
    return this.name || void 0;
  }
  /**
   * @returns the member name if the schema is a member schema.
   * @throws Error when the schema isn't a member schema.
   */
  getMemberName() {
    if (!this.isMemberSchema()) {
      throw new Error(`@smithy/core/schema - cannot get member name on non-member schema: ${this.getName(true)}`);
    }
    return this.memberName;
  }
  isMemberSchema() {
    return this._isMemberSchema;
  }
  isUnitSchema() {
    return this.getSchema() === "unit";
  }
  /**
   * boolean methods on this class help control flow in shape serialization and deserialization.
   */
  isListSchema() {
    const inner = this.getSchema();
    if (typeof inner === "number") {
      // numeric list sentinels occupy [LIST_MODIFIER, MAP_MODIFIER)
      return inner >= SCHEMA.LIST_MODIFIER && inner < SCHEMA.MAP_MODIFIER;
    }
    return inner instanceof ListSchema;
  }
  isMapSchema() {
    const inner = this.getSchema();
    if (typeof inner === "number") {
      return inner >= SCHEMA.MAP_MODIFIER && inner <= 255;
    }
    return inner instanceof MapSchema;
  }
  isDocumentSchema() {
    return this.getSchema() === SCHEMA.DOCUMENT;
  }
  isStructSchema() {
    const inner = this.getSchema();
    return inner !== null && typeof inner === "object" && "members" in inner || inner instanceof StructureSchema;
  }
  isBlobSchema() {
    return this.getSchema() === SCHEMA.BLOB || this.getSchema() === SCHEMA.STREAMING_BLOB;
  }
  isTimestampSchema() {
    const schema = this.getSchema();
    return typeof schema === "number" && schema >= SCHEMA.TIMESTAMP_DEFAULT && schema <= SCHEMA.TIMESTAMP_EPOCH_SECONDS;
  }
  isStringSchema() {
    return this.getSchema() === SCHEMA.STRING;
  }
  isBooleanSchema() {
    return this.getSchema() === SCHEMA.BOOLEAN;
  }
  isNumericSchema() {
    return this.getSchema() === SCHEMA.NUMERIC;
  }
  isBigIntegerSchema() {
    return this.getSchema() === SCHEMA.BIG_INTEGER;
  }
  isBigDecimalSchema() {
    return this.getSchema() === SCHEMA.BIG_DECIMAL;
  }
  isStreaming() {
    const streaming = !!this.getMergedTraits().streaming;
    if (streaming) {
      return true;
    }
    return this.getSchema() === SCHEMA.STREAMING_BLOB;
  }
  /**
   * @returns own traits merged with member traits, where member traits of the same trait key take priority.
   * This method is cached.
   */
  getMergedTraits() {
    if (this.normalizedTraits) {
      return this.normalizedTraits;
    }
    this.normalizedTraits = {
      ...this.getOwnTraits(),
      ...this.getMemberTraits()
    };
    return this.normalizedTraits;
  }
  /**
   * @returns only the member traits. If the schema is not a member, this returns empty.
   */
  getMemberTraits() {
    return _NormalizedSchema.translateTraits(this.memberTraits);
  }
  /**
   * @returns only the traits inherent to the shape or member target shape if this schema is a member.
   * If there are any member traits they are excluded.
   */
  getOwnTraits() {
    return _NormalizedSchema.translateTraits(this.traits);
  }
  /**
   * @returns the map's key's schema. Returns a dummy Document schema if this schema is a Document.
   *
   * @throws Error if the schema is not a Map or Document.
   */
  getKeySchema() {
    if (this.isDocumentSchema()) {
      return _NormalizedSchema.memberFrom([SCHEMA.DOCUMENT, 0], "key");
    }
    if (!this.isMapSchema()) {
      throw new Error(`@smithy/core/schema - cannot get key schema for non-map schema: ${this.getName(true)}`);
    }
    const schema = this.getSchema();
    if (typeof schema === "number") {
      // low 6 bits of a numeric sentinel encode the scalar type
      return _NormalizedSchema.memberFrom([63 & schema, 0], "key");
    }
    return _NormalizedSchema.memberFrom([schema.keySchema, 0], "key");
  }
  /**
   * @returns the schema of the map's value or list's member.
   * Returns a dummy Document schema if this schema is a Document.
   *
   * @throws Error if the schema is not a Map, List, nor Document.
   */
  getValueSchema() {
    const schema = this.getSchema();
    if (typeof schema === "number") {
      if (this.isMapSchema()) {
        return _NormalizedSchema.memberFrom([63 & schema, 0], "value");
      } else if (this.isListSchema()) {
        return _NormalizedSchema.memberFrom([63 & schema, 0], "member");
      }
    }
    if (schema && typeof schema === "object") {
      if (this.isStructSchema()) {
        throw new Error(`cannot call getValueSchema() with StructureSchema ${this.getName(true)}`);
      }
      const collection = schema;
      if ("valueSchema" in collection) {
        if (this.isMapSchema()) {
          return _NormalizedSchema.memberFrom([collection.valueSchema, 0], "value");
        } else if (this.isListSchema()) {
          return _NormalizedSchema.memberFrom([collection.valueSchema, 0], "member");
        }
      }
    }
    if (this.isDocumentSchema()) {
      return _NormalizedSchema.memberFrom([SCHEMA.DOCUMENT, 0], "value");
    }
    throw new Error(`@smithy/core/schema - the schema ${this.getName(true)} does not have a value member.`);
  }
  /**
   * @returns the NormalizedSchema for the given member name. The returned instance will return true for `isMemberSchema()`
   * and will have the member name given.
   * @param member - which member to retrieve and wrap.
   *
   * @throws Error if member does not exist or the schema is neither a document nor structure.
   * Note that errors are assumed to be structures and unions are considered structures for these purposes.
   */
  getMemberSchema(member) {
    if (this.isStructSchema()) {
      const struct2 = this.getSchema();
      if (!(member in struct2.members)) {
        throw new Error(
          `@smithy/core/schema - the schema ${this.getName(true)} does not have a member with name=${member}.`
        );
      }
      return _NormalizedSchema.memberFrom(struct2.members[member], member);
    }
    if (this.isDocumentSchema()) {
      return _NormalizedSchema.memberFrom([SCHEMA.DOCUMENT, 0], member);
    }
    throw new Error(`@smithy/core/schema - the schema ${this.getName(true)} does not have members.`);
  }
  /**
   * This can be used for checking the members as a hashmap.
   * Prefer the structIterator method for iteration.
   *
   * This does NOT return list and map members, it is only for structures.
   *
   * @returns a map of member names to member schemas (normalized).
   */
  getMemberSchemas() {
    const { schema } = this;
    const struct2 = schema;
    if (!struct2 || typeof struct2 !== "object") {
      return {};
    }
    if ("members" in struct2) {
      const buffer = {};
      for (const member of struct2.memberNames) {
        buffer[member] = this.getMemberSchema(member);
      }
      return buffer;
    }
    return {};
  }
  /**
   * Allows iteration over members of a structure schema.
   * Each yield is a pair of the member name and member schema.
   *
   * This avoids the overhead of calling Object.entries(ns.getMemberSchemas()).
   */
  *structIterator() {
    if (this.isUnitSchema()) {
      return;
    }
    if (!this.isStructSchema()) {
      throw new Error("@smithy/core/schema - cannot acquire structIterator on non-struct schema.");
    }
    const struct2 = this.getSchema();
    for (let i = 0; i < struct2.memberNames.length; ++i) {
      yield [struct2.memberNames[i], _NormalizedSchema.memberFrom([struct2.memberList[i], 0], struct2.memberNames[i])];
    }
  }
  /**
   * @returns a last-resort human-readable name for the schema if it has no other identifiers.
   */
  getSchemaName() {
    const schema = this.getSchema();
    if (typeof schema === "number") {
      // split the sentinel into scalar type (low bits) and container modifier
      const _schema = 63 & schema;
      const container = 192 & schema;
      const type = Object.entries(SCHEMA).find(([, value]) => {
        return value === _schema;
      })?.[0] ?? "Unknown";
      switch (container) {
        case SCHEMA.MAP_MODIFIER:
          return `${type}Map`;
        case SCHEMA.LIST_MODIFIER:
          return `${type}List`;
        case 0:
          return type;
      }
    }
    return "Unknown";
  }
};
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
ErrorSchema,
|
||||
ListSchema,
|
||||
MapSchema,
|
||||
NormalizedSchema,
|
||||
OperationSchema,
|
||||
SCHEMA,
|
||||
Schema,
|
||||
SimpleSchema,
|
||||
StructureSchema,
|
||||
TypeRegistry,
|
||||
deref,
|
||||
deserializerMiddlewareOption,
|
||||
error,
|
||||
getSchemaSerdePlugin,
|
||||
list,
|
||||
map,
|
||||
op,
|
||||
serializerMiddlewareOption,
|
||||
sim,
|
||||
struct
|
||||
});
|
||||
+751
@@ -0,0 +1,751 @@
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
|
||||
// src/submodules/serde/index.ts
|
||||
var serde_exports = {};
|
||||
__export(serde_exports, {
|
||||
LazyJsonString: () => LazyJsonString,
|
||||
NumericValue: () => NumericValue,
|
||||
copyDocumentWithTransform: () => copyDocumentWithTransform,
|
||||
dateToUtcString: () => dateToUtcString,
|
||||
expectBoolean: () => expectBoolean,
|
||||
expectByte: () => expectByte,
|
||||
expectFloat32: () => expectFloat32,
|
||||
expectInt: () => expectInt,
|
||||
expectInt32: () => expectInt32,
|
||||
expectLong: () => expectLong,
|
||||
expectNonNull: () => expectNonNull,
|
||||
expectNumber: () => expectNumber,
|
||||
expectObject: () => expectObject,
|
||||
expectShort: () => expectShort,
|
||||
expectString: () => expectString,
|
||||
expectUnion: () => expectUnion,
|
||||
handleFloat: () => handleFloat,
|
||||
limitedParseDouble: () => limitedParseDouble,
|
||||
limitedParseFloat: () => limitedParseFloat,
|
||||
limitedParseFloat32: () => limitedParseFloat32,
|
||||
logger: () => logger,
|
||||
nv: () => nv,
|
||||
parseBoolean: () => parseBoolean,
|
||||
parseEpochTimestamp: () => parseEpochTimestamp,
|
||||
parseRfc3339DateTime: () => parseRfc3339DateTime,
|
||||
parseRfc3339DateTimeWithOffset: () => parseRfc3339DateTimeWithOffset,
|
||||
parseRfc7231DateTime: () => parseRfc7231DateTime,
|
||||
quoteHeader: () => quoteHeader,
|
||||
splitEvery: () => splitEvery,
|
||||
splitHeader: () => splitHeader,
|
||||
strictParseByte: () => strictParseByte,
|
||||
strictParseDouble: () => strictParseDouble,
|
||||
strictParseFloat: () => strictParseFloat,
|
||||
strictParseFloat32: () => strictParseFloat32,
|
||||
strictParseInt: () => strictParseInt,
|
||||
strictParseInt32: () => strictParseInt32,
|
||||
strictParseLong: () => strictParseLong,
|
||||
strictParseShort: () => strictParseShort
|
||||
});
|
||||
module.exports = __toCommonJS(serde_exports);
|
||||
|
||||
// src/submodules/serde/copyDocumentWithTransform.ts
|
||||
var import_schema = require("@smithy/core/schema");
|
||||
// Deep-copies a document value while applying `transform` to every node,
// leaves first and then the containers that hold them. The normalized schema
// is threaded alongside each node so the transform can be schema-aware.
var copyDocumentWithTransform = (source, schemaRef, transform = (_) => _) => {
  const ns = import_schema.NormalizedSchema.of(schemaRef);
  switch (typeof source) {
    case "undefined":
    case "boolean":
    case "number":
    case "string":
    case "bigint":
    case "symbol":
      // Primitive leaves are passed straight to the transform.
      return transform(source, ns);
    case "function":
    case "object":
      if (source === null) {
        return transform(null, ns);
      }
      if (Array.isArray(source)) {
        const newArray = new Array(source.length);
        let i = 0;
        for (const item of source) {
          newArray[i++] = copyDocumentWithTransform(item, ns.getValueSchema(), transform);
        }
        return transform(newArray, ns);
      }
      // Duck-typed check for typed arrays / ArrayBuffer views: copy bytes.
      if ("byteLength" in source) {
        const newBytes = new Uint8Array(source.byteLength);
        newBytes.set(source, 0);
        return transform(newBytes, ns);
      }
      // Dates are handed to the transform without cloning.
      if (source instanceof Date) {
        return transform(source, ns);
      }
      const newObject = {};
      if (ns.isMapSchema()) {
        for (const key of Object.keys(source)) {
          newObject[key] = copyDocumentWithTransform(source[key], ns.getValueSchema(), transform);
        }
      } else if (ns.isStructSchema()) {
        // Iterate declared members only; the member schema drives recursion.
        for (const [key, memberSchema] of ns.structIterator()) {
          newObject[key] = copyDocumentWithTransform(source[key], memberSchema, transform);
        }
      } else if (ns.isDocumentSchema()) {
        for (const key of Object.keys(source)) {
          newObject[key] = copyDocumentWithTransform(source[key], ns.getValueSchema(), transform);
        }
      }
      // NOTE(review): objects under schemas that are neither map, struct,
      // nor document copy no keys — newObject stays empty here.
      return transform(newObject, ns);
    default:
      return transform(source, ns);
  }
};
|
||||
|
||||
// src/submodules/serde/parse-utils.ts
|
||||
// Strictly parses the exact strings "true"/"false"; anything else throws.
var parseBoolean = (value) => {
  if (value === "true") {
    return true;
  }
  if (value === "false") {
    return false;
  }
  throw new Error(`Unable to parse boolean value "${value}"`);
};
|
||||
// Asserts a boolean, tolerating 0/1 and "true"/"false" (any case) with a
// logged warning. null/undefined pass through as undefined; anything else throws.
var expectBoolean = (value) => {
  if (value == null) {
    return void 0;
  }
  switch (typeof value) {
    case "boolean":
      return value;
    case "number":
      if (value === 0 || value === 1) {
        logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`));
        return value === 1;
      }
      break;
    case "string": {
      const lower = value.toLowerCase();
      if (lower === "false" || lower === "true") {
        logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`));
        return lower === "true";
      }
      break;
    }
  }
  throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`);
};
|
||||
// Asserts a number, tolerating numeric strings with a warning when the
// round-trip representation differs. null/undefined pass through.
var expectNumber = (value) => {
  if (value == null) {
    return void 0;
  }
  if (typeof value === "number") {
    return value;
  }
  if (typeof value === "string") {
    const parsed = parseFloat(value);
    if (!Number.isNaN(parsed)) {
      if (String(parsed) !== String(value)) {
        logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`));
      }
      return parsed;
    }
  }
  throw new TypeError(`Expected number, got ${typeof value}: ${value}`);
};
// Largest finite IEEE-754 single-precision value, rounded up.
var MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23));
// Like expectNumber, but rejects finite magnitudes beyond float32 range.
var expectFloat32 = (value) => {
  const expected = expectNumber(value);
  const isFiniteValue = expected !== void 0 && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity;
  if (isFiniteValue && Math.abs(expected) > MAX_FLOAT) {
    throw new TypeError(`Expected 32-bit float, got ${value}`);
  }
  return expected;
};
|
||||
// Asserts an integral number; null/undefined pass through as undefined.
var expectLong = (value) => {
  if (value == null) {
    return void 0;
  }
  if (!Number.isInteger(value) || Number.isNaN(value)) {
    throw new TypeError(`Expected integer, got ${typeof value}: ${value}`);
  }
  return value;
};
var expectInt = expectLong;
var expectInt32 = (value) => expectSizedInt(value, 32);
var expectShort = (value) => expectSizedInt(value, 16);
var expectByte = (value) => expectSizedInt(value, 8);
// Asserts the integer survives a round-trip through an N-bit signed cast,
// i.e. it fits in the requested width.
var expectSizedInt = (value, size) => {
  const expected = expectLong(value);
  if (expected !== void 0 && castInt(expected, size) !== expected) {
    throw new TypeError(`Expected ${size}-bit integer, got ${value}`);
  }
  return expected;
};
// Truncates to a signed integer of the given width via typed arrays.
var castInt = (value, size) => {
  if (size === 32) {
    return Int32Array.of(value)[0];
  }
  if (size === 16) {
    return Int16Array.of(value)[0];
  }
  if (size === 8) {
    return Int8Array.of(value)[0];
  }
};
|
||||
// Throws when value is null/undefined; the optional location names the
// offending field in the error. Returns the value unchanged otherwise.
var expectNonNull = (value, location) => {
  if (value != null) {
    return value;
  }
  if (location) {
    throw new TypeError(`Expected a non-null value for ${location}`);
  }
  throw new TypeError("Expected a non-null value");
};
|
||||
// Asserts a plain (non-array) object; null/undefined pass through.
var expectObject = (value) => {
  if (value == null) {
    return void 0;
  }
  const isNonArrayObject = typeof value === "object" && !Array.isArray(value);
  if (isNonArrayObject) {
    return value;
  }
  const receivedType = Array.isArray(value) ? "array" : typeof value;
  throw new TypeError(`Expected object, got ${receivedType}: ${value}`);
};
// Asserts a string, coercing booleans/numbers/bigints with a warning.
var expectString = (value) => {
  if (value == null) {
    return void 0;
  }
  switch (typeof value) {
    case "string":
      return value;
    case "boolean":
    case "number":
    case "bigint":
      logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`));
      return String(value);
    default:
      throw new TypeError(`Expected string, got ${typeof value}: ${value}`);
  }
};
// Asserts a union value: an object with exactly one non-null member set.
var expectUnion = (value) => {
  if (value == null) {
    return void 0;
  }
  const asObject = expectObject(value);
  const setKeys = [];
  for (const [k, v] of Object.entries(asObject)) {
    if (v != null) {
      setKeys.push(k);
    }
  }
  if (setKeys.length === 0) {
    throw new TypeError(`Unions must have exactly one non-null member. None were found.`);
  }
  if (setKeys.length > 1) {
    throw new TypeError(`Unions must have exactly one non-null member. Keys ${setKeys} were not null.`);
  }
  return asObject;
};
|
||||
// Strict double parser: strings must be well-formed numerics (no implicit NaN).
var strictParseDouble = (value) => {
  return typeof value == "string" ? expectNumber(parseNumber(value)) : expectNumber(value);
};
var strictParseFloat = strictParseDouble;
// Strict float32 parser: same as above with a 32-bit range check.
var strictParseFloat32 = (value) => {
  return typeof value == "string" ? expectFloat32(parseNumber(value)) : expectFloat32(value);
};
// Accepts decimal/scientific numerics plus the literals Infinity/-Infinity/NaN.
var NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g;
// Parses a string only when the whole string is a single numeric token.
var parseNumber = (value) => {
  const matches = value.match(NUMBER_REGEX);
  const fullMatch = matches !== null && matches[0].length === value.length;
  if (!fullMatch) {
    throw new TypeError(`Expected real number, got implicit NaN`);
  }
  return parseFloat(value);
};
|
||||
// Lenient double parser: only the literal strings NaN/Infinity/-Infinity are
// accepted as strings; numbers pass through expectNumber.
var limitedParseDouble = (value) => {
  return typeof value == "string" ? parseFloatString(value) : expectNumber(value);
};
var handleFloat = limitedParseDouble;
var limitedParseFloat = limitedParseDouble;
// Float32 variant of the lenient parser.
var limitedParseFloat32 = (value) => {
  return typeof value == "string" ? parseFloatString(value) : expectFloat32(value);
};
// Maps the three special float literals to their values; rejects all else.
var parseFloatString = (value) => {
  if (value === "NaN") {
    return NaN;
  }
  if (value === "Infinity") {
    return Infinity;
  }
  if (value === "-Infinity") {
    return -Infinity;
  }
  throw new Error(`Unable to parse float value: ${value}`);
};
|
||||
// Strict integer parsers: strings go through parseNumber first so malformed
// numerics throw instead of silently becoming NaN.
var strictParseLong = (value) => {
  const numeric = typeof value === "string" ? parseNumber(value) : value;
  return expectLong(numeric);
};
var strictParseInt = strictParseLong;
var strictParseInt32 = (value) => {
  const numeric = typeof value === "string" ? parseNumber(value) : value;
  return expectInt32(numeric);
};
var strictParseShort = (value) => {
  const numeric = typeof value === "string" ? parseNumber(value) : value;
  return expectShort(numeric);
};
var strictParseByte = (value) => {
  const numeric = typeof value === "string" ? parseNumber(value) : value;
  return expectByte(numeric);
};
|
||||
// Builds a short stack-trace-annotated warning: first five stack lines,
// minus any frame mentioning this helper itself.
var stackTraceWarning = (message) => {
  const stack = String(new TypeError(message).stack || message);
  const topLines = stack.split("\n").slice(0, 5);
  return topLines.filter((s) => !s.includes("stackTraceWarning")).join("\n");
};
// Indirection point so consumers can swap the warning sink.
var logger = {
  warn: console.warn
};
|
||||
|
||||
// src/submodules/serde/date-utils.ts
|
||||
var DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"];
var MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"];
// Formats a Date as an IMF-fixdate style UTC string,
// e.g. "Thu, 02 Jan 2020 03:04:05 GMT".
function dateToUtcString(date) {
  const pad2 = (n) => n < 10 ? `0${n}` : `${n}`;
  const dayName = DAYS[date.getUTCDay()];
  const monthName = MONTHS[date.getUTCMonth()];
  const year = date.getUTCFullYear();
  const dayOfMonth = pad2(date.getUTCDate());
  const clock = `${pad2(date.getUTCHours())}:${pad2(date.getUTCMinutes())}:${pad2(date.getUTCSeconds())}`;
  return `${dayName}, ${dayOfMonth} ${monthName} ${year} ${clock} GMT`;
}
|
||||
// RFC 3339 date-time restricted to the Z (UTC) designator — no numeric offsets.
var RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/);
// Parses an RFC 3339 UTC timestamp string into a Date.
// null/undefined pass through; non-strings and malformed values throw.
var parseRfc3339DateTime = (value) => {
  if (value == null) {
    return void 0;
  }
  if (typeof value !== "string") {
    throw new TypeError("RFC-3339 date-times must be expressed as strings");
  }
  const match = RFC3339.exec(value);
  if (!match) {
    throw new TypeError("Invalid RFC-3339 date-time value");
  }
  const [, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match;
  return buildDate(
    strictParseShort(stripLeadingZeroes(yearStr)),
    parseDateValue(monthStr, "month", 1, 12),
    parseDateValue(dayStr, "day", 1, 31),
    { hours, minutes, seconds, fractionalMilliseconds }
  );
};
|
||||
// RFC 3339 date-time accepting either Z or a numeric ±HH:MM offset.
var RFC3339_WITH_OFFSET = new RegExp(
  /^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/
);
// Parses an RFC 3339 timestamp with offset into a UTC-normalized Date.
var parseRfc3339DateTimeWithOffset = (value) => {
  if (value == null) {
    return void 0;
  }
  if (typeof value !== "string") {
    throw new TypeError("RFC-3339 date-times must be expressed as strings");
  }
  const match = RFC3339_WITH_OFFSET.exec(value);
  if (!match) {
    throw new TypeError("Invalid RFC-3339 date-time value");
  }
  const [, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match;
  const date = buildDate(
    strictParseShort(stripLeadingZeroes(yearStr)),
    parseDateValue(monthStr, "month", 1, 12),
    parseDateValue(dayStr, "day", 1, 31),
    { hours, minutes, seconds, fractionalMilliseconds }
  );
  const isUtc = offsetStr.toUpperCase() == "Z";
  if (!isUtc) {
    // Shift the wall-clock reading back to UTC by subtracting the offset.
    date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr));
  }
  return date;
};
|
||||
// RFC 7231 preferred format: "Sun, 06 Nov 1994 08:49:37 GMT".
var IMF_FIXDATE = new RegExp(
  /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/
);
// Obsolete RFC 850 format: "Sunday, 06-Nov-94 08:49:37 GMT" (two-digit year).
var RFC_850_DATE = new RegExp(
  /^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/
);
// Obsolete asctime format: "Sun Nov  6 08:49:37 1994" (space-padded day).
var ASC_TIME = new RegExp(
  /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? (\d{4})$/
);
// Parses an RFC 7231 HTTP-date in any of its three accepted formats.
// null/undefined pass through as undefined; non-strings and unmatched
// strings throw TypeError.
var parseRfc7231DateTime = (value) => {
  if (value === null || value === void 0) {
    return void 0;
  }
  if (typeof value !== "string") {
    throw new TypeError("RFC-7231 date-times must be expressed as strings");
  }
  let match = IMF_FIXDATE.exec(value);
  if (match) {
    const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match;
    return buildDate(
      strictParseShort(stripLeadingZeroes(yearStr)),
      parseMonthByShortName(monthStr),
      parseDateValue(dayStr, "day", 1, 31),
      { hours, minutes, seconds, fractionalMilliseconds }
    );
  }
  match = RFC_850_DATE.exec(value);
  if (match) {
    const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match;
    // RFC 850 years are two digits; adjustRfc850Year pulls far-future dates
    // back a century per RFC 7231's interpretation rule.
    return adjustRfc850Year(
      buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), {
        hours,
        minutes,
        seconds,
        fractionalMilliseconds
      })
    );
  }
  match = ASC_TIME.exec(value);
  if (match) {
    // asctime ordering differs: month precedes day and the year trails.
    const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match;
    return buildDate(
      strictParseShort(stripLeadingZeroes(yearStr)),
      parseMonthByShortName(monthStr),
      // trimLeft strips asctime's space padding on single-digit days.
      parseDateValue(dayStr.trimLeft(), "day", 1, 31),
      { hours, minutes, seconds, fractionalMilliseconds }
    );
  }
  throw new TypeError("Invalid RFC-7231 date-time value");
};
|
||||
// Parses an epoch-seconds timestamp (number, numeric string, or a tagged
// numeric wrapper with tag === 1) into a Date, rounding to milliseconds.
var parseEpochTimestamp = (value) => {
  if (value == null) {
    return void 0;
  }
  let valueAsDouble;
  if (typeof value === "number") {
    valueAsDouble = value;
  } else if (typeof value === "string") {
    valueAsDouble = strictParseDouble(value);
  } else if (typeof value === "object" && value.tag === 1) {
    valueAsDouble = value.value;
  } else {
    throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation");
  }
  const invalid = Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity;
  if (invalid) {
    throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics");
  }
  // Seconds -> milliseconds, rounded to the nearest millisecond.
  return new Date(Math.round(valueAsDouble * 1e3));
};
|
||||
// Assembles a UTC Date from validated components. `month` is 1-based on
// input and converted to the 0-based index Date.UTC expects.
var buildDate = (year, month, day, time) => {
  const adjustedMonth = month - 1;
  validateDayOfMonth(year, adjustedMonth, day);
  const hours = parseDateValue(time.hours, "hour", 0, 23);
  const minutes = parseDateValue(time.minutes, "minute", 0, 59);
  // seconds can go up to 60 for leap seconds
  const seconds = parseDateValue(time.seconds, "seconds", 0, 60);
  const millis = parseMilliseconds(time.fractionalMilliseconds);
  return new Date(Date.UTC(year, adjustedMonth, day, hours, minutes, seconds, millis));
};
|
||||
// Expands a two-digit year into the current century, rolling forward one
// century when the result would be in the past (RFC 850 interpretation).
var parseTwoDigitYear = (value) => {
  const thisYear = (/* @__PURE__ */ new Date()).getUTCFullYear();
  const centuryStart = Math.floor(thisYear / 100) * 100;
  const valueInThisCentury = centuryStart + strictParseShort(stripLeadingZeroes(value));
  return valueInThisCentury < thisYear ? valueInThisCentury + 100 : valueInThisCentury;
};
|
||||
var FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1e3;
// RFC 7231: an RFC 850 date more than 50 years in the future must be
// reinterpreted as the same calendar date one century earlier.
var adjustRfc850Year = (input) => {
  const millisFromNow = input.getTime() - (/* @__PURE__ */ new Date()).getTime();
  if (millisFromNow <= FIFTY_YEARS_IN_MILLIS) {
    return input;
  }
  return new Date(
    Date.UTC(
      input.getUTCFullYear() - 100,
      input.getUTCMonth(),
      input.getUTCDate(),
      input.getUTCHours(),
      input.getUTCMinutes(),
      input.getUTCSeconds(),
      input.getUTCMilliseconds()
    )
  );
};
|
||||
// Maps a three-letter month abbreviation to its 1-based month number.
var parseMonthByShortName = (value) => {
  const monthIdx = MONTHS.indexOf(value);
  const found = monthIdx >= 0;
  if (!found) {
    throw new TypeError(`Invalid month: ${value}`);
  }
  return monthIdx + 1;
};
|
||||
var DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
// Throws when `day` exceeds the length of the given (0-based) month,
// accounting for February 29th in leap years.
var validateDayOfMonth = (year, month, day) => {
  const maxDays = month === 1 && isLeapYear(year) ? 29 : DAYS_IN_MONTH[month];
  if (day > maxDays) {
    throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`);
  }
};
// Gregorian leap year: divisible by 4, except centuries not divisible by 400.
var isLeapYear = (year) => {
  const divisibleByFour = year % 4 === 0;
  const centuryRule = year % 100 !== 0 || year % 400 === 0;
  return divisibleByFour && centuryRule;
};
|
||||
// Parses a date component string and range-checks it (bounds inclusive).
var parseDateValue = (value, type, lower, upper) => {
  const dateVal = strictParseByte(stripLeadingZeroes(value));
  const outOfRange = dateVal < lower || dateVal > upper;
  if (outOfRange) {
    throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`);
  }
  return dateVal;
};
|
||||
// Converts a fractional-seconds digit string (e.g. "123") into milliseconds
// by parsing it as "0.<digits>" and scaling. Absent input yields 0.
var parseMilliseconds = (value) => {
  if (value == null) {
    return 0;
  }
  return strictParseFloat32("0." + value) * 1e3;
};
|
||||
// Converts a "±HH:MM" timezone offset string into signed milliseconds.
var parseOffsetToMilliseconds = (value) => {
  const directionStr = value[0];
  if (directionStr != "+" && directionStr != "-") {
    throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`);
  }
  const direction = directionStr == "+" ? 1 : -1;
  const hour = Number(value.substring(1, 3));
  const minute = Number(value.substring(4, 6));
  const totalMinutes = hour * 60 + minute;
  return direction * totalMinutes * 60 * 1e3;
};
|
||||
// Removes leading "0" characters, always keeping at least the final
// character (so "000" becomes "0", not "").
var stripLeadingZeroes = (value) => {
  let idx = 0;
  const lastIndex = value.length - 1;
  while (idx < lastIndex && value.charAt(idx) === "0") {
    idx += 1;
  }
  return idx === 0 ? value : value.slice(idx);
};
|
||||
|
||||
// src/submodules/serde/lazy-json.ts
|
||||
// Wraps a JSON string in a String object carrying a lazy deserializeJSON()
// method, deferring JSON.parse until the caller needs the parsed form.
var LazyJsonString = function LazyJsonString2(val) {
  const asString = () => String(val);
  return Object.assign(new String(val), {
    deserializeJSON() {
      return JSON.parse(asString());
    },
    toString: asString,
    toJSON: asString
  });
};
// Coerces any input to a LazyJsonString: lazy-json-like objects pass
// through, string-likes are wrapped as-is, everything else is stringified.
LazyJsonString.from = (object) => {
  const isLazyLike = object && typeof object === "object" && (object instanceof LazyJsonString || "deserializeJSON" in object);
  if (isLazyLike) {
    return object;
  }
  if (typeof object === "string" || Object.getPrototypeOf(object) === String.prototype) {
    return LazyJsonString(String(object));
  }
  return LazyJsonString(JSON.stringify(object));
};
LazyJsonString.fromObject = LazyJsonString.from;
||||
|
||||
// src/submodules/serde/quote-header.ts
|
||||
// Quotes an HTTP header list element when it contains a comma or a double
// quote, escaping embedded quotes with a backslash.
function quoteHeader(part) {
  const needsQuoting = part.includes(",") || part.includes('"');
  if (!needsQuoting) {
    return part;
  }
  return `"${part.replace(/"/g, '\\"')}"`;
}
|
||||
|
||||
// src/submodules/serde/split-every.ts
|
||||
// Splits `value` on `delimiter`, then regroups the pieces so each output
// element spans `numDelimiters` original segments rejoined by the delimiter.
// A trailing partial group is kept. (The `pending === ""` seed check means a
// group starting with an empty segment drops that segment's delimiter —
// preserved from the original implementation.)
function splitEvery(value, delimiter, numDelimiters) {
  if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) {
    throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery.");
  }
  const segments = value.split(delimiter);
  if (numDelimiters === 1) {
    return segments;
  }
  const groups = [];
  let pending = "";
  segments.forEach((segment, i) => {
    pending = pending === "" ? segment : pending + delimiter + segment;
    if ((i + 1) % numDelimiters === 0) {
      groups.push(pending);
      pending = "";
    }
  });
  if (pending !== "") {
    groups.push(pending);
  }
  return groups;
}
|
||||
|
||||
// src/submodules/serde/split-header.ts
|
||||
// Splits an HTTP header value on commas, honoring double-quoted sections
// (commas inside quotes don't split) and backslash-escaped quotes. Each
// piece is trimmed, unwrapped from surrounding quotes, and unescaped.
var splitHeader = (value) => {
  const total = value.length;
  const rawValues = [];
  let withinQuotes = false;
  let prevChar;
  let anchor = 0;
  for (let i = 0; i < total; ++i) {
    const char = value[i];
    if (char === `"`) {
      // An unescaped quote toggles the quoted state.
      if (prevChar !== "\\") {
        withinQuotes = !withinQuotes;
      }
    } else if (char === "," && !withinQuotes) {
      rawValues.push(value.slice(anchor, i));
      anchor = i + 1;
    }
    prevChar = char;
  }
  rawValues.push(value.slice(anchor));
  return rawValues.map((raw) => {
    let v = raw.trim();
    const len = v.length;
    if (len < 2) {
      return v;
    }
    if (v[0] === `"` && v[len - 1] === `"`) {
      v = v.slice(1, len - 1);
    }
    return v.replace(/\\"/g, '"');
  });
};
|
||||
|
||||
// src/submodules/serde/value/NumericValue.ts
|
||||
// An arbitrary-precision numeric carried as its exact string form: an
// optional leading "-", digits, and at most one decimal point. The
// constructor validates the string and throws on anything else.
var NumericValue = class {
  constructor(string, type) {
    this.string = string;
    this.type = type;
    const MINUS = 45;
    const DOT = 46;
    let dotCount = 0;
    for (let i = 0; i < string.length; ++i) {
      const char = string.charCodeAt(i);
      // A minus sign is only permitted in the first position.
      if (i === 0 && char === MINUS) {
        continue;
      }
      if (char === DOT) {
        dotCount += 1;
        if (dotCount > 1) {
          throw new Error("@smithy/core/serde - NumericValue must contain at most one decimal point.");
        }
        continue;
      }
      const isDigit = char >= 48 && char <= 57;
      if (!isDigit) {
        throw new Error(
          `@smithy/core/serde - NumericValue must only contain [0-9], at most one decimal point ".", and an optional negation prefix "-".`
        );
      }
    }
  }
  toString() {
    return this.string;
  }
  // Structural instanceof: any object with string/type fields whose
  // constructor is named NumericValue counts (cross-realm friendly).
  [Symbol.hasInstance](object) {
    if (!object || typeof object !== "object") {
      return false;
    }
    const candidate = object;
    return typeof candidate.string === "string" && typeof candidate.type === "string" && candidate.constructor?.name === "NumericValue";
  }
};
// Convenience factory producing a bigDecimal-typed NumericValue.
function nv(input) {
  return new NumericValue(String(input), "bigDecimal");
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
LazyJsonString,
|
||||
NumericValue,
|
||||
copyDocumentWithTransform,
|
||||
dateToUtcString,
|
||||
expectBoolean,
|
||||
expectByte,
|
||||
expectFloat32,
|
||||
expectInt,
|
||||
expectInt32,
|
||||
expectLong,
|
||||
expectNonNull,
|
||||
expectNumber,
|
||||
expectObject,
|
||||
expectShort,
|
||||
expectString,
|
||||
expectUnion,
|
||||
handleFloat,
|
||||
limitedParseDouble,
|
||||
limitedParseFloat,
|
||||
limitedParseFloat32,
|
||||
logger,
|
||||
nv,
|
||||
parseBoolean,
|
||||
parseEpochTimestamp,
|
||||
parseRfc3339DateTime,
|
||||
parseRfc3339DateTimeWithOffset,
|
||||
parseRfc7231DateTime,
|
||||
quoteHeader,
|
||||
splitEvery,
|
||||
splitHeader,
|
||||
strictParseByte,
|
||||
strictParseDouble,
|
||||
strictParseFloat,
|
||||
strictParseFloat32,
|
||||
strictParseInt,
|
||||
strictParseInt32,
|
||||
strictParseLong,
|
||||
strictParseShort
|
||||
});
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../../index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../../index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../../index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../../index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../index.js");
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
module.exports = require("../index.js");
|
||||
+2
@@ -0,0 +1,2 @@
|
||||
import { SMITHY_CONTEXT_KEY } from "@smithy/types";
|
||||
// Returns the mutable Smithy context object stored on the handler context,
// lazily creating and attaching an empty one on first access.
export const getSmithyContext = (context) => context[SMITHY_CONTEXT_KEY] || (context[SMITHY_CONTEXT_KEY] = {});
|
||||
+8
@@ -0,0 +1,8 @@
|
||||
export * from "./getSmithyContext";
|
||||
export * from "./middleware-http-auth-scheme";
|
||||
export * from "./middleware-http-signing";
|
||||
export * from "./normalizeProvider";
|
||||
export { createPaginator } from "./pagination/createPaginator";
|
||||
export * from "./protocols/requestBuilder";
|
||||
export * from "./setFeature";
|
||||
export * from "./util-identity-and-auth";
|
||||
Generated
Vendored
+17
@@ -0,0 +1,17 @@
|
||||
import { httpAuthSchemeMiddleware } from "./httpAuthSchemeMiddleware";
|
||||
// Relative placement options: run auth scheme selection during the
// serialize step, just before endpoint resolution (endpointV2Middleware).
export const httpAuthSchemeEndpointRuleSetMiddlewareOptions = {
  step: "serialize",
  tags: ["HTTP_AUTH_SCHEME"],
  name: "httpAuthSchemeMiddleware",
  override: true,
  relation: "before",
  toMiddleware: "endpointV2Middleware",
};
// Plugin that wires httpAuthSchemeMiddleware into the client stack for
// clients built on endpoint rule sets.
export const getHttpAuthSchemeEndpointRuleSetPlugin = (config, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }) => ({
  applyToStack: (clientStack) => {
    clientStack.addRelativeTo(httpAuthSchemeMiddleware(config, {
      httpAuthSchemeParametersProvider,
      identityProviderConfigProvider,
    }), httpAuthSchemeEndpointRuleSetMiddlewareOptions);
  },
});
|
||||
Generated
Vendored
+18
@@ -0,0 +1,18 @@
|
||||
import { serializerMiddlewareOption } from "@smithy/middleware-serde";
|
||||
import { httpAuthSchemeMiddleware } from "./httpAuthSchemeMiddleware";
|
||||
// Relative placement options: run auth scheme selection during the
// serialize step, just before the request serializer middleware.
export const httpAuthSchemeMiddlewareOptions = {
  step: "serialize",
  tags: ["HTTP_AUTH_SCHEME"],
  name: "httpAuthSchemeMiddleware",
  override: true,
  relation: "before",
  toMiddleware: serializerMiddlewareOption.name,
};
// Plugin that wires httpAuthSchemeMiddleware into the client stack for
// clients that do not use endpoint rule sets.
export const getHttpAuthSchemePlugin = (config, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }) => ({
  applyToStack: (clientStack) => {
    clientStack.addRelativeTo(httpAuthSchemeMiddleware(config, {
      httpAuthSchemeParametersProvider,
      identityProviderConfigProvider,
    }), httpAuthSchemeMiddlewareOptions);
  },
});
|
||||
Generated
Vendored
+43
@@ -0,0 +1,43 @@
|
||||
import { SMITHY_CONTEXT_KEY, } from "@smithy/types";
|
||||
import { getSmithyContext } from "@smithy/util-middleware";
|
||||
import { resolveAuthOptions } from "./resolveAuthOptions";
|
||||
// Builds a schemeId -> scheme lookup from the configured HttpAuthSchemes.
function convertHttpAuthSchemesToMap(httpAuthSchemes) {
  const map = new Map();
  for (const scheme of httpAuthSchemes) {
    map.set(scheme.schemeId, scheme);
  }
  return map;
}
// Middleware that selects the first usable auth scheme (after applying the
// caller's scheme preference order), resolves its identity, and stores the
// selection on the Smithy handler context for the signing middleware.
// Throws with the accumulated failure reasons when no scheme is usable.
export const httpAuthSchemeMiddleware = (config, mwOptions) => (next, context) => async (args) => {
  const options = config.httpAuthSchemeProvider(await mwOptions.httpAuthSchemeParametersProvider(config, context, args.input));
  const authSchemePreference = config.authSchemePreference ? await config.authSchemePreference() : [];
  const resolvedOptions = resolveAuthOptions(options, authSchemePreference);
  const authSchemes = convertHttpAuthSchemesToMap(config.httpAuthSchemes);
  const smithyContext = getSmithyContext(context);
  const failureReasons = [];
  for (const option of resolvedOptions) {
    const scheme = authSchemes.get(option.schemeId);
    if (!scheme) {
      failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` was not enabled for this service.`);
      continue;
    }
    const identityProvider = scheme.identityProvider(await mwOptions.identityProviderConfigProvider(config));
    if (!identityProvider) {
      failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` did not have an IdentityProvider configured.`);
      continue;
    }
    // Merge per-request extracted properties into the option's static ones.
    const { identityProperties = {}, signingProperties = {} } = option.propertiesExtractor?.(config, context) || {};
    option.identityProperties = Object.assign(option.identityProperties || {}, identityProperties);
    option.signingProperties = Object.assign(option.signingProperties || {}, signingProperties);
    smithyContext.selectedHttpAuthScheme = {
      httpAuthOption: option,
      identity: await identityProvider(option.identityProperties),
      signer: scheme.signer,
    };
    // First scheme that resolves an identity wins.
    break;
  }
  if (!smithyContext.selectedHttpAuthScheme) {
    throw new Error(failureReasons.join("\n"));
  }
  return next(args);
};
|
||||
Generated
Vendored
+3
@@ -0,0 +1,3 @@
|
||||
export * from "./httpAuthSchemeMiddleware";
|
||||
export * from "./getHttpAuthSchemeEndpointRuleSetPlugin";
|
||||
export * from "./getHttpAuthSchemePlugin";
|
||||
Generated
Vendored
+20
@@ -0,0 +1,20 @@
|
||||
/**
 * Reorders candidate auth options according to a caller-supplied preference
 * list of scheme names (the part of the schemeId after "#").
 *
 * Preferred candidates come first, in preference order; all remaining
 * candidates follow in their original order. With no preference, the
 * candidate array is returned unchanged (same reference).
 */
export const resolveAuthOptions = (candidateAuthOptions, authSchemePreference) => {
    if (!authSchemePreference?.length) {
        return candidateAuthOptions;
    }
    const ordered = [];
    // First pass: pull candidates matching each preferred scheme name.
    for (const preferredSchemeName of authSchemePreference) {
        for (const candidate of candidateAuthOptions) {
            if (candidate.schemeId.split("#")[1] === preferredSchemeName) {
                ordered.push(candidate);
            }
        }
    }
    // Second pass: append everything not already selected, preserving order.
    for (const candidate of candidateAuthOptions) {
        const alreadyIncluded = ordered.some(({ schemeId }) => schemeId === candidate.schemeId);
        if (!alreadyIncluded) {
            ordered.push(candidate);
        }
    }
    return ordered;
};
|
||||
Generated
Vendored
+15
@@ -0,0 +1,15 @@
|
||||
import { httpSigningMiddleware } from "./httpSigningMiddleware";
|
||||
/**
 * Middleware-stack registration options for `httpSigningMiddleware`.
 *
 * Runs in the "finalizeRequest" step, relative-positioned *after*
 * `retryMiddleware`. The aliases keep compatibility with the legacy
 * auth middlewares this one replaces.
 */
export const httpSigningMiddlewareOptions = {
    step: "finalizeRequest",
    tags: ["HTTP_SIGNING"],
    name: "httpSigningMiddleware",
    aliases: ["apiKeyMiddleware", "tokenMiddleware", "awsAuthMiddleware"],
    override: true,
    relation: "after",
    toMiddleware: "retryMiddleware",
};
|
||||
/**
 * Creates a pluggable that installs `httpSigningMiddleware` on a client's
 * middleware stack using `httpSigningMiddlewareOptions`.
 */
export const getHttpSigningPlugin = (config) => {
    return {
        applyToStack(clientStack) {
            clientStack.addRelativeTo(httpSigningMiddleware(config), httpSigningMiddlewareOptions);
        },
    };
};
|
||||
Generated
Vendored
+24
@@ -0,0 +1,24 @@
|
||||
import { HttpRequest } from "@smithy/protocol-http";
|
||||
import { SMITHY_CONTEXT_KEY, } from "@smithy/types";
|
||||
import { getSmithyContext } from "@smithy/util-middleware";
|
||||
/**
 * Default signer error handler: rethrows the error unchanged.
 * The `signingProperties` argument is accepted (to match the signer
 * errorHandler signature) but unused.
 */
const defaultErrorHandler = (signingProperties) => {
    return (error) => {
        throw error;
    };
};
/**
 * Default signer success handler: a no-op accepting the response and
 * signing properties.
 */
const defaultSuccessHandler = (httpResponse, signingProperties) => { };
|
||||
/**
 * Middleware that signs the outgoing HTTP request with the auth scheme
 * previously selected by `httpAuthSchemeMiddleware` (read from the smithy
 * context). Non-HttpRequest args pass through unsigned.
 *
 * Errors from downstream are routed to the signer's `errorHandler` (or the
 * rethrowing default); on success the signer's `successHandler` (or a no-op)
 * observes the response.
 */
export const httpSigningMiddleware = (config) => (next, context) => async (args) => {
    if (!HttpRequest.isInstance(args.request)) {
        return next(args);
    }
    const smithyContext = getSmithyContext(context);
    const scheme = smithyContext.selectedHttpAuthScheme;
    if (!scheme) {
        throw new Error(`No HttpAuthScheme was selected: unable to sign request`);
    }
    const { httpAuthOption: { signingProperties = {} }, identity, signer, } = scheme;
    // Sign the request before forwarding; the error handler is curried with
    // the signing properties so it can contextualize failures.
    const output = await next({
        ...args,
        request: await signer.sign(args.request, identity, signingProperties),
    }).catch((signer.errorHandler || defaultErrorHandler)(signingProperties));
    (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties);
    return output;
};
|
||||
Generated
Vendored
+2
@@ -0,0 +1,2 @@
|
||||
export * from "./httpSigningMiddleware";
|
||||
export * from "./getHttpSigningMiddleware";
|
||||
+6
@@ -0,0 +1,6 @@
|
||||
export const normalizeProvider = (input) => {
|
||||
if (typeof input === "function")
|
||||
return input;
|
||||
const promisified = Promise.resolve(input);
|
||||
return () => promisified;
|
||||
};
|
||||
Generated
Vendored
+41
@@ -0,0 +1,41 @@
|
||||
const makePagedClientRequest = async (CommandCtor, client, input, withCommand = (_) => _, ...args) => {
|
||||
let command = new CommandCtor(input);
|
||||
command = withCommand(command) ?? command;
|
||||
return await client.send(command, ...args);
|
||||
};
|
||||
/**
 * Factory producing an async-generator paginator for one operation.
 *
 * Each iteration writes the current token into the input under
 * `inputTokenName` (and the page size under `pageSizeTokenName`, if given,
 * when not already set), sends the request, and yields the page. The next
 * token is read from the page via the dotted path `outputTokenName`.
 * Iteration stops when the token is falsy, or — with `stopOnSameToken` —
 * when the token repeats. Note: the caller's `input` object is mutated.
 */
export function createPaginator(ClientCtor, CommandCtor, inputTokenName, outputTokenName, pageSizeTokenName) {
    return async function* paginateOperation(config, input, ...additionalArguments) {
        const _input = input;
        // Explicit startingToken overrides any token already on the input.
        let token = config.startingToken ?? _input[inputTokenName];
        let hasNext = true;
        let page;
        while (hasNext) {
            _input[inputTokenName] = token;
            if (pageSizeTokenName) {
                _input[pageSizeTokenName] = _input[pageSizeTokenName] ?? config.pageSize;
            }
            if (config.client instanceof ClientCtor) {
                page = await makePagedClientRequest(CommandCtor, config.client, input, config.withCommand, ...additionalArguments);
            }
            else {
                throw new Error(`Invalid client, expected instance of ${ClientCtor.name}`);
            }
            yield page;
            const prevToken = token;
            token = get(page, outputTokenName);
            hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken));
        }
        return undefined;
    };
}
|
||||
/**
 * Resolves a dotted path (e.g. "a.b.c") against an object, returning
 * undefined as soon as a non-object (or nullish) value is reached before
 * the path is exhausted.
 */
const get = (fromObject, path) => {
    return path.split(".").reduce((cursor, step) => {
        // Stop descending once the cursor is not an indexable object.
        if (!cursor || typeof cursor !== "object") {
            return undefined;
        }
        return cursor[step];
    }, fromObject);
};
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
export { requestBuilder } from "@smithy/core/protocols";
|
||||
+11
@@ -0,0 +1,11 @@
|
||||
export function setFeature(context, feature, value) {
|
||||
if (!context.__smithy_context) {
|
||||
context.__smithy_context = {
|
||||
features: {},
|
||||
};
|
||||
}
|
||||
else if (!context.__smithy_context.features) {
|
||||
context.__smithy_context.features = {};
|
||||
}
|
||||
context.__smithy_context.features[feature] = value;
|
||||
}
|
||||
Generated
Vendored
+136
@@ -0,0 +1,136 @@
|
||||
import { NormalizedSchema } from "@smithy/core/schema";
|
||||
import { copyDocumentWithTransform, parseEpochTimestamp } from "@smithy/core/serde";
|
||||
import { cbor } from "./cbor";
|
||||
import { dateToTag } from "./parseCborBody";
|
||||
/**
 * Payload codec for the RPC v2 CBOR protocol. Produces shape serializers
 * and deserializers that share this codec's serde context.
 */
export class CborCodec {
    setSerdeContext(serdeContext) {
        this.serdeContext = serdeContext;
    }
    createSerializer() {
        const shapeSerializer = new CborShapeSerializer();
        shapeSerializer.setSerdeContext(this.serdeContext);
        return shapeSerializer;
    }
    createDeserializer() {
        const shapeDeserializer = new CborShapeDeserializer();
        shapeDeserializer.setSerdeContext(this.serdeContext);
        return shapeDeserializer;
    }
}
|
||||
/**
 * Serializes a structured value to CBOR according to a schema.
 *
 * `write` stages a transformed copy of the value; `flush` encodes the staged
 * value to bytes and clears it. The transform converts Dates to CBOR tags,
 * drops nullish entries from non-sparse lists/maps, and drops struct members
 * not present in the schema.
 */
export class CborShapeSerializer {
    setSerdeContext(serdeContext) {
        this.serdeContext = serdeContext;
    }
    write(schema, value) {
        this.value = copyDocumentWithTransform(value, schema, (_, schemaRef) => {
            if (_ instanceof Date) {
                // Timestamps are encoded as CBOR epoch tags.
                return dateToTag(_);
            }
            if (_ instanceof Uint8Array) {
                return _;
            }
            const ns = NormalizedSchema.of(schemaRef);
            const sparse = !!ns.getMergedTraits().sparse;
            if (ns.isListSchema() && Array.isArray(_)) {
                // Non-sparse lists may not contain null entries.
                if (!sparse) {
                    return _.filter((item) => item != null);
                }
            }
            else if (_ && typeof _ === "object") {
                const members = ns.getMemberSchemas();
                const isStruct = ns.isStructSchema();
                if (!sparse || isStruct) {
                    // NOTE: mutates the (copied) object in place.
                    for (const [k, v] of Object.entries(_)) {
                        const filteredOutByNonSparse = !sparse && v == null;
                        const filteredOutByUnrecognizedMember = isStruct && !(k in members);
                        if (filteredOutByNonSparse || filteredOutByUnrecognizedMember) {
                            delete _[k];
                        }
                    }
                    return _;
                }
            }
            return _;
        });
    }
    // Encode the staged value and release the reference so large payloads
    // don't linger after serialization.
    flush() {
        const buffer = cbor.serialize(this.value);
        this.value = undefined;
        return buffer;
    }
}
|
||||
/**
 * Deserializes CBOR bytes into structured values guided by a schema.
 *
 * `read` decodes the bytes then recursively walks the schema with
 * `readValue`, converting timestamps, filtering nullish entries out of
 * non-sparse lists/maps, and mapping struct members by the schema's
 * member iterator.
 */
export class CborShapeDeserializer {
    setSerdeContext(serdeContext) {
        this.serdeContext = serdeContext;
    }
    read(schema, bytes) {
        const data = cbor.deserialize(bytes);
        return this.readValue(schema, data);
    }
    readValue(_schema, value) {
        const ns = NormalizedSchema.of(_schema);
        const schema = ns.getSchema();
        // Numeric schema ids denote simple types; only timestamps/blobs need
        // special handling here.
        if (typeof schema === "number") {
            if (ns.isTimestampSchema()) {
                return parseEpochTimestamp(value);
            }
            if (ns.isBlobSchema()) {
                return value;
            }
        }
        // Primitives pass through unchanged.
        if (typeof value === "undefined" ||
            typeof value === "boolean" ||
            typeof value === "number" ||
            typeof value === "string" ||
            typeof value === "bigint" ||
            typeof value === "symbol") {
            return value;
        }
        else if (typeof value === "function" || typeof value === "object") {
            if (value === null) {
                return null;
            }
            // Byte-ish values (e.g. typed arrays) and Dates pass through.
            if ("byteLength" in value) {
                return value;
            }
            if (value instanceof Date) {
                return value;
            }
            if (ns.isDocumentSchema()) {
                return value;
            }
            if (ns.isListSchema()) {
                const newArray = [];
                const memberSchema = ns.getValueSchema();
                const sparse = ns.isListSchema() && !!ns.getMergedTraits().sparse;
                for (const item of value) {
                    newArray.push(this.readValue(memberSchema, item));
                    // Non-sparse lists drop nullish members immediately.
                    if (!sparse && newArray[newArray.length - 1] == null) {
                        newArray.pop();
                    }
                }
                return newArray;
            }
            const newObject = {};
            if (ns.isMapSchema()) {
                const sparse = ns.getMergedTraits().sparse;
                const targetSchema = ns.getValueSchema();
                for (const key of Object.keys(value)) {
                    newObject[key] = this.readValue(targetSchema, value[key]);
                    // Non-sparse maps drop nullish entries.
                    if (newObject[key] == null && !sparse) {
                        delete newObject[key];
                    }
                }
            }
            else if (ns.isStructSchema()) {
                for (const [key, memberSchema] of ns.structIterator()) {
                    newObject[key] = this.readValue(memberSchema, value[key]);
                }
            }
            return newObject;
        }
        else {
            return value;
        }
    }
}
|
||||
Generated
Vendored
+74
@@ -0,0 +1,74 @@
|
||||
import { RpcProtocol } from "@smithy/core/protocols";
|
||||
import { deref, TypeRegistry } from "@smithy/core/schema";
|
||||
import { getSmithyContext } from "@smithy/util-middleware";
|
||||
import { CborCodec } from "./CborCodec";
|
||||
import { loadSmithyRpcV2CborErrorCode } from "./parseCborBody";
|
||||
/**
 * Client protocol implementation for Smithy RPC v2 over CBOR.
 *
 * Serializes requests via the base RpcProtocol, then applies the rpc-v2-cbor
 * headers, the `/service/{service}/operation/{operation}` path suffix, and
 * the CBOR body/content-length rules. Errors are resolved to registered
 * error schemas by their namespaced error code.
 */
export class SmithyRpcV2CborProtocol extends RpcProtocol {
    constructor({ defaultNamespace }) {
        super({ defaultNamespace });
        this.codec = new CborCodec();
        this.serializer = this.codec.createSerializer();
        this.deserializer = this.codec.createDeserializer();
    }
    getShapeId() {
        return "smithy.protocols#rpcv2Cbor";
    }
    getPayloadCodec() {
        return this.codec;
    }
    async serializeRequest(operationSchema, input, context) {
        const request = await super.serializeRequest(operationSchema, input, context);
        Object.assign(request.headers, {
            "content-type": "application/cbor",
            "smithy-protocol": "rpc-v2-cbor",
            accept: "application/cbor",
        });
        // Operations with unit input send no body and no content-type.
        if (deref(operationSchema.input) === "unit") {
            delete request.body;
            delete request.headers["content-type"];
        }
        else {
            // A bodyless non-unit request still sends an empty CBOR map.
            if (!request.body) {
                this.serializer.write(15, {});
                request.body = this.serializer.flush();
            }
            try {
                request.headers["content-length"] = String(request.body.byteLength);
            }
            catch (e) { } // best-effort: body may not expose byteLength
        }
        const { service, operation } = getSmithyContext(context);
        const path = `/service/${service}/operation/${operation}`;
        // Avoid a doubled slash when the base path already ends with "/".
        if (request.path.endsWith("/")) {
            request.path += path.slice(1);
        }
        else {
            request.path += path;
        }
        return request;
    }
    async deserializeResponse(operationSchema, context, response) {
        return super.deserializeResponse(operationSchema, context, response);
    }
    async handleError(operationSchema, context, response, dataObject, metadata) {
        const error = loadSmithyRpcV2CborErrorCode(response, dataObject) ?? "Unknown";
        // Error codes may be fully qualified ("ns#Code"); otherwise fall back
        // to the protocol's default namespace.
        let namespace = this.options.defaultNamespace;
        if (error.includes("#")) {
            [namespace] = error.split("#");
        }
        const registry = TypeRegistry.for(namespace);
        const errorSchema = registry.getSchema(error);
        if (!errorSchema) {
            throw new Error("schema not found for " + error);
        }
        const message = dataObject.message ?? dataObject.Message ?? "Unknown";
        const exception = new errorSchema.ctor(message);
        // Attach response metadata and all payload fields to the exception.
        Object.assign(exception, {
            $metadata: metadata,
            $response: response,
            message,
            ...dataObject,
        });
        throw exception;
    }
}
|
||||
Generated
Vendored
+3
@@ -0,0 +1,3 @@
|
||||
/**
 * Debug helper: renders each byte as its 8-digit binary string followed by
 * the decimal value in parentheses, e.g. "00000101 (5)".
 */
export function printBytes(bytes) {
    return Array.from(bytes, (byte) => `${byte.toString(2).padStart(8, "0")} (${byte})`);
}
|
||||
Generated
Vendored
+411
@@ -0,0 +1,411 @@
|
||||
import { NumericValue } from "@smithy/core/serde";
|
||||
import { toUtf8 } from "@smithy/util-utf8";
|
||||
import { alloc, extendedFloat16, extendedFloat32, extendedFloat64, extendedOneByte, majorList, majorMap, majorNegativeInt64, majorTag, majorUint64, majorUnstructuredByteString, majorUtf8String, minorIndefinite, specialFalse, specialNull, specialTrue, specialUndefined, tag, } from "./cbor-types";
|
||||
// Environment capability flags: prefer TextDecoder / Buffer fast paths when
// the host provides them.
const USE_TEXT_DECODER = typeof TextDecoder !== "undefined";
const USE_BUFFER = typeof Buffer !== "undefined";
// Module-level decoder state: the buffer being decoded and a DataView over
// it. Set via setPayload() before decode() is called.
let payload = alloc(0);
let dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength);
const textDecoder = USE_TEXT_DECODER ? new TextDecoder() : null;
// Byte length consumed by the most recent decode step; decode functions
// communicate how far they advanced through this shared cursor.
let _offset = 0;
|
||||
/**
 * Installs the byte buffer that subsequent decode() calls will read from,
 * and rebuilds the shared DataView over it.
 */
export function setPayload(bytes) {
    payload = bytes;
    dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength);
}
|
||||
/**
 * Decodes one CBOR data item from the module `payload` between byte offsets
 * `at` (inclusive) and `to` (exclusive).
 *
 * Returns the decoded JS value and records the number of bytes consumed in
 * the module-level `_offset` cursor. Dispatches on the 3-bit major type and
 * 5-bit minor (additional information) of the initial byte per RFC 8949.
 */
export function decode(at, to) {
    if (at >= to) {
        throw new Error("unexpected end of (decode) payload.");
    }
    const major = (payload[at] & 224) >> 5; // top 3 bits
    const minor = payload[at] & 31; // bottom 5 bits
    switch (major) {
        case majorUint64:
        case majorNegativeInt64:
        case majorTag:
            // Read the argument (count) encoded by the minor bits: either the
            // minor value itself (< 24) or a 1/2/4/8-byte big-endian integer.
            let unsignedInt;
            let offset;
            if (minor < 24) {
                unsignedInt = minor;
                offset = 1;
            }
            else {
                switch (minor) {
                    case extendedOneByte:
                    case extendedFloat16:
                    case extendedFloat32:
                    case extendedFloat64:
                        const countLength = minorValueToArgumentLength[minor];
                        const countOffset = (countLength + 1);
                        offset = countOffset;
                        if (to - at < countOffset) {
                            throw new Error(`countLength ${countLength} greater than remaining buf len.`);
                        }
                        const countIndex = at + 1;
                        if (countLength === 1) {
                            unsignedInt = payload[countIndex];
                        }
                        else if (countLength === 2) {
                            unsignedInt = dataView.getUint16(countIndex);
                        }
                        else if (countLength === 4) {
                            unsignedInt = dataView.getUint32(countIndex);
                        }
                        else {
                            unsignedInt = dataView.getBigUint64(countIndex);
                        }
                        break;
                    default:
                        throw new Error(`unexpected minor value ${minor}.`);
                }
            }
            if (major === majorUint64) {
                _offset = offset;
                // Downcast bigint to number when within the safe-integer range.
                return castBigInt(unsignedInt);
            }
            else if (major === majorNegativeInt64) {
                // CBOR negative integers encode (-1 - n).
                let negativeInt;
                if (typeof unsignedInt === "bigint") {
                    negativeInt = BigInt(-1) - unsignedInt;
                }
                else {
                    negativeInt = -1 - unsignedInt;
                }
                _offset = offset;
                return castBigInt(negativeInt);
            }
            else {
                // majorTag: tags 2/3 are bignums, 4 is a decimal fraction,
                // everything else is returned as a generic tagged value.
                if (minor === 2 || minor === 3) {
                    const length = decodeCount(at + offset, to);
                    let b = BigInt(0);
                    const start = at + offset + _offset;
                    // Big-endian byte string -> bigint.
                    for (let i = start; i < start + length; ++i) {
                        b = (b << BigInt(8)) | BigInt(payload[i]);
                    }
                    _offset = offset + length;
                    return minor === 3 ? -b - BigInt(1) : b;
                }
                else if (minor === 4) {
                    // Decimal fraction: [exponent, mantissa] -> NumericValue.
                    const decimalFraction = decode(at + offset, to);
                    const [exponent, mantissa] = decimalFraction;
                    const s = mantissa.toString();
                    const numericString = exponent === 0 ? s : s.slice(0, s.length + exponent) + "." + s.slice(exponent);
                    // NOTE(review): unlike the other tag branches, `_offset` is
                    // not combined with the tag-header `offset` here — confirm
                    // callers account for the header bytes in this path.
                    return new NumericValue(numericString, "bigDecimal");
                }
                else {
                    const value = decode(at + offset, to);
                    const valueOffset = _offset;
                    _offset = offset + valueOffset;
                    return tag({ tag: castBigInt(unsignedInt), value });
                }
            }
        case majorUtf8String:
        case majorMap:
        case majorList:
        case majorUnstructuredByteString:
            // Indefinite-length items are terminated by a break marker and use
            // dedicated decoders.
            if (minor === minorIndefinite) {
                switch (major) {
                    case majorUtf8String:
                        return decodeUtf8StringIndefinite(at, to);
                    case majorMap:
                        return decodeMapIndefinite(at, to);
                    case majorList:
                        return decodeListIndefinite(at, to);
                    case majorUnstructuredByteString:
                        return decodeUnstructuredByteStringIndefinite(at, to);
                }
            }
            else {
                switch (major) {
                    case majorUtf8String:
                        return decodeUtf8String(at, to);
                    case majorMap:
                        return decodeMap(at, to);
                    case majorList:
                        return decodeList(at, to);
                    case majorUnstructuredByteString:
                        return decodeUnstructuredByteString(at, to);
                }
            }
        default:
            // Major 7: simple values and floats.
            return decodeSpecial(at, to);
    }
}
|
||||
/**
 * Decodes bytes [at, to) to a UTF-8 string, using the fastest facility the
 * host offers: Buffer.toString, then TextDecoder, then the portable toUtf8
 * fallback.
 */
function bytesToUtf8(bytes, at, to) {
    if (USE_BUFFER && bytes.constructor?.name === "Buffer") {
        return bytes.toString("utf-8", at, to);
    }
    if (textDecoder) {
        return textDecoder.decode(bytes.subarray(at, to));
    }
    return toUtf8(bytes.subarray(at, to));
}
|
||||
/**
 * Converts a bigint to a number, warning (without throwing) when the value
 * falls outside the safe-integer range and precision is lost.
 */
function demote(bigInteger) {
    const asNumber = Number(bigInteger);
    const outsideSafeRange = asNumber < Number.MIN_SAFE_INTEGER || Number.MAX_SAFE_INTEGER < asNumber;
    if (outsideSafeRange) {
        console.warn(new Error(`@smithy/core/cbor - truncating BigInt(${bigInteger}) to ${asNumber} with loss of precision.`));
    }
    return asNumber;
}
|
||||
// Maps the "extended" minor values (24..27) to the byte length of the
// argument that follows the initial byte: 1, 2, 4, or 8 bytes.
const minorValueToArgumentLength = {
    [extendedOneByte]: 1,
    [extendedFloat16]: 2,
    [extendedFloat32]: 4,
    [extendedFloat64]: 8,
};
|
||||
/**
 * Decodes an IEEE 754 binary16 (half-precision) float from its two raw
 * bytes: 1 sign bit, 5 exponent bits, 10 fraction bits.
 *
 * Handles zero, subnormals (exponent 0), infinities and NaN (exponent 31),
 * and normal values with the implicit leading 1.
 */
export function bytesToFloat16(a, b) {
    const negative = (a & 0b1000_0000) !== 0;
    const exponent = (a & 0b0111_1100) >> 2;
    const fraction = ((a & 0b0000_0011) << 8) | b;
    if (exponent === 0) {
        // Zero (regardless of sign bit) or a subnormal value.
        if (fraction === 0) {
            return 0;
        }
        const subnormal = (fraction / 1024) * Math.pow(2, 1 - 15);
        return negative ? -subnormal : subnormal;
    }
    if (exponent === 0b11111) {
        // All-ones exponent: infinity when the fraction is zero, else NaN.
        if (fraction !== 0) {
            return NaN;
        }
        return negative ? -Infinity : Infinity;
    }
    // Normal value: implicit leading 1 on the 10-bit fraction, bias 15.
    const magnitude = (1 + fraction / 1024) * Math.pow(2, exponent - 15);
    return negative ? -magnitude : magnitude;
}
|
||||
/**
 * Reads the length/count argument of the CBOR item starting at `at`,
 * setting `_offset` to the number of header bytes consumed.
 *
 * Counts encoded as 8 bytes are demoted to a JS number (with a warning if
 * precision is lost).
 */
function decodeCount(at, to) {
    const minor = payload[at] & 31;
    // Small counts are stored directly in the minor bits.
    if (minor < 24) {
        _offset = 1;
        return minor;
    }
    if (minor === extendedOneByte ||
        minor === extendedFloat16 ||
        minor === extendedFloat32 ||
        minor === extendedFloat64) {
        const countLength = minorValueToArgumentLength[minor];
        _offset = (countLength + 1);
        if (to - at < _offset) {
            throw new Error(`countLength ${countLength} greater than remaining buf len.`);
        }
        const countIndex = at + 1;
        if (countLength === 1) {
            return payload[countIndex];
        }
        else if (countLength === 2) {
            return dataView.getUint16(countIndex);
        }
        else if (countLength === 4) {
            return dataView.getUint32(countIndex);
        }
        return demote(dataView.getBigUint64(countIndex));
    }
    throw new Error(`unexpected minor value ${minor}.`);
}
|
||||
/**
 * Decodes a definite-length CBOR text string starting at `at`.
 * Sets `_offset` to header length + string length.
 */
function decodeUtf8String(at, to) {
    const length = decodeCount(at, to);
    const offset = _offset;
    at += offset;
    if (to - at < length) {
        throw new Error(`string len ${length} greater than remaining buf len.`);
    }
    const value = bytesToUtf8(payload, at, at + length);
    _offset = offset + length;
    return value;
}
|
||||
/**
 * Decodes an indefinite-length CBOR text string: a sequence of
 * definite-length text-string chunks terminated by a break marker (0xff).
 * Chunk bytes are concatenated and decoded as UTF-8 in one pass.
 */
function decodeUtf8StringIndefinite(at, to) {
    at += 1; // skip the indefinite-length header byte
    const vector = [];
    for (const base = at; at < to;) {
        // Break marker ends the string.
        if (payload[at] === 255) {
            const data = alloc(vector.length);
            data.set(vector, 0);
            // +2 = header byte + break byte.
            _offset = at - base + 2;
            return bytesToUtf8(data, 0, data.length);
        }
        const major = (payload[at] & 224) >> 5;
        const minor = payload[at] & 31;
        if (major !== majorUtf8String) {
            throw new Error(`unexpected major type ${major} in indefinite string.`);
        }
        if (minor === minorIndefinite) {
            throw new Error("nested indefinite string.");
        }
        // Read the chunk as raw bytes; UTF-8 decoding happens once at the end
        // so multi-byte characters may span chunk boundaries.
        const bytes = decodeUnstructuredByteString(at, to);
        const length = _offset;
        at += length;
        for (let i = 0; i < bytes.length; ++i) {
            vector.push(bytes[i]);
        }
    }
    throw new Error("expected break marker.");
}
|
||||
/**
 * Decodes a definite-length CBOR byte string starting at `at`.
 * Returns a subarray view into the payload (no copy);
 * sets `_offset` to header length + data length.
 */
function decodeUnstructuredByteString(at, to) {
    const length = decodeCount(at, to);
    const offset = _offset;
    at += offset;
    if (to - at < length) {
        throw new Error(`unstructured byte string len ${length} greater than remaining buf len.`);
    }
    const value = payload.subarray(at, at + length);
    _offset = offset + length;
    return value;
}
|
||||
/**
 * Decodes an indefinite-length CBOR byte string: definite-length byte-string
 * chunks terminated by a break marker (0xff), concatenated into one buffer.
 */
function decodeUnstructuredByteStringIndefinite(at, to) {
    at += 1; // skip the indefinite-length header byte
    const vector = [];
    for (const base = at; at < to;) {
        // Break marker ends the string.
        if (payload[at] === 255) {
            const data = alloc(vector.length);
            data.set(vector, 0);
            // +2 = header byte + break byte.
            _offset = at - base + 2;
            return data;
        }
        const major = (payload[at] & 224) >> 5;
        const minor = payload[at] & 31;
        if (major !== majorUnstructuredByteString) {
            throw new Error(`unexpected major type ${major} in indefinite string.`);
        }
        if (minor === minorIndefinite) {
            throw new Error("nested indefinite string.");
        }
        const bytes = decodeUnstructuredByteString(at, to);
        const length = _offset;
        at += length;
        for (let i = 0; i < bytes.length; ++i) {
            vector.push(bytes[i]);
        }
    }
    throw new Error("expected break marker.");
}
|
||||
/**
 * Decodes a definite-length CBOR array: reads the element count, then
 * decodes that many items sequentially, advancing by each item's `_offset`.
 * Sets `_offset` to header length + total element bytes.
 */
function decodeList(at, to) {
    const listDataLength = decodeCount(at, to);
    const offset = _offset;
    at += offset;
    const base = at;
    const list = Array(listDataLength);
    for (let i = 0; i < listDataLength; ++i) {
        const item = decode(at, to);
        const itemOffset = _offset;
        list[i] = item;
        at += itemOffset;
    }
    _offset = offset + (at - base);
    return list;
}
|
||||
/**
 * Decodes an indefinite-length CBOR array: items until a break marker
 * (0xff). Sets `_offset` to the full span including header and break bytes.
 */
function decodeListIndefinite(at, to) {
    at += 1; // skip the indefinite-length header byte
    const list = [];
    for (const base = at; at < to;) {
        if (payload[at] === 255) {
            // +2 = header byte + break byte.
            _offset = at - base + 2;
            return list;
        }
        const item = decode(at, to);
        const n = _offset;
        at += n;
        list.push(item);
    }
    throw new Error("expected break marker.");
}
|
||||
/**
 * Decodes a definite-length CBOR map into a plain object.
 * Keys must be text strings; alternating key/value items are decoded
 * sequentially. Sets `_offset` to header length + total entry bytes.
 */
function decodeMap(at, to) {
    const mapDataLength = decodeCount(at, to);
    const offset = _offset;
    at += offset;
    const base = at;
    const map = {};
    for (let i = 0; i < mapDataLength; ++i) {
        if (at >= to) {
            throw new Error("unexpected end of map payload.");
        }
        const major = (payload[at] & 224) >> 5;
        // Only string keys are accepted (JS object keys).
        if (major !== majorUtf8String) {
            throw new Error(`unexpected major type ${major} for map key at index ${at}.`);
        }
        const key = decode(at, to);
        at += _offset;
        const value = decode(at, to);
        at += _offset;
        map[key] = value;
    }
    _offset = offset + (at - base);
    return map;
}
|
||||
/**
 * Decodes an indefinite-length CBOR map into a plain object: string-keyed
 * key/value pairs until a break marker (0xff).
 */
function decodeMapIndefinite(at, to) {
    at += 1; // skip the indefinite-length header byte
    const base = at;
    const map = {};
    for (; at < to;) {
        if (at >= to) {
            throw new Error("unexpected end of map payload.");
        }
        // Break marker ends the map.
        if (payload[at] === 255) {
            // +2 = header byte + break byte.
            _offset = at - base + 2;
            return map;
        }
        const major = (payload[at] & 224) >> 5;
        // Only string keys are accepted (JS object keys).
        if (major !== majorUtf8String) {
            throw new Error(`unexpected major type ${major} for map key.`);
        }
        const key = decode(at, to);
        at += _offset;
        const value = decode(at, to);
        at += _offset;
        map[key] = value;
    }
    throw new Error("expected break marker.");
}
|
||||
/**
 * Decodes a major-type-7 item: simple values (true/false/null/undefined)
 * and half/single/double-precision floats. Sets `_offset` accordingly.
 * Note: CBOR `undefined` is deliberately mapped to JS null.
 */
function decodeSpecial(at, to) {
    const minor = payload[at] & 31;
    switch (minor) {
        case specialTrue:
        case specialFalse:
            _offset = 1;
            return minor === specialTrue;
        case specialNull:
            _offset = 1;
            return null;
        case specialUndefined:
            // Mapped to null rather than undefined.
            _offset = 1;
            return null;
        case extendedFloat16:
            if (to - at < 3) {
                throw new Error("incomplete float16 at end of buf.");
            }
            _offset = 3;
            return bytesToFloat16(payload[at + 1], payload[at + 2]);
        case extendedFloat32:
            if (to - at < 5) {
                throw new Error("incomplete float32 at end of buf.");
            }
            _offset = 5;
            return dataView.getFloat32(at + 1);
        case extendedFloat64:
            if (to - at < 9) {
                throw new Error("incomplete float64 at end of buf.");
            }
            _offset = 9;
            return dataView.getFloat64(at + 1);
        default:
            throw new Error(`unexpected minor value ${minor}.`);
    }
}
|
||||
/**
 * Narrows a bigint to a plain number when it fits in the safe-integer
 * range; values outside that range (and plain numbers) are returned as-is.
 */
function castBigInt(bigInt) {
    if (typeof bigInt === "number") {
        return bigInt;
    }
    const asNumber = Number(bigInt);
    const fitsSafely = Number.MIN_SAFE_INTEGER <= asNumber && asNumber <= Number.MAX_SAFE_INTEGER;
    return fitsSafely ? asNumber : bigInt;
}
|
||||
Generated
Vendored
+221
@@ -0,0 +1,221 @@
|
||||
import { NumericValue } from "@smithy/core/serde";
|
||||
import { fromUtf8 } from "@smithy/util-utf8";
|
||||
import { alloc, extendedFloat16, extendedFloat32, extendedFloat64, majorList, majorMap, majorNegativeInt64, majorSpecial, majorTag, majorUint64, majorUnstructuredByteString, majorUtf8String, specialFalse, specialNull, specialTrue, tagSymbol, } from "./cbor-types";
|
||||
// Prefer Buffer fast paths when the host provides them.
const USE_BUFFER = typeof Buffer !== "undefined";
// Module-level encoder state: a growable output buffer, a DataView over it,
// and a write cursor. ensureSpace()/resize() grow the buffer as needed.
const initialSize = 2048;
let data = alloc(initialSize);
let dataView = new DataView(data.buffer, data.byteOffset, data.byteLength);
let cursor = 0;
|
||||
/**
 * Grows the output buffer if fewer than `bytes` bytes remain after the
 * cursor. Small buffers grow geometrically (4x); past 16 MB they grow
 * additively to bound over-allocation.
 */
function ensureSpace(bytes) {
    const remaining = data.byteLength - cursor;
    if (remaining < bytes) {
        if (cursor < 16000000) {
            resize(Math.max(data.byteLength * 4, data.byteLength + bytes));
        }
        else {
            resize(data.byteLength + bytes + 16000000);
        }
    }
}
|
||||
/**
 * Copies the encoded bytes written so far into a fresh buffer, resets the
 * cursor for the next encode, and returns the copy.
 */
export function toUint8Array() {
    const out = alloc(cursor);
    out.set(data.subarray(0, cursor), 0);
    cursor = 0;
    return out;
}
|
||||
/**
 * Replaces the output buffer with one of `size` bytes, copying existing
 * contents (via Buffer.copy when available, else TypedArray.set) and
 * rebuilding the DataView.
 */
export function resize(size) {
    const old = data;
    data = alloc(size);
    if (old) {
        if (old.copy) {
            // Node Buffer fast path.
            old.copy(data, 0, 0, old.byteLength);
        }
        else {
            data.set(old, 0);
        }
    }
    dataView = new DataView(data.buffer, data.byteOffset, data.byteLength);
}
|
||||
/**
 * Writes a CBOR item header: the major type plus its argument, using the
 * smallest encoding (inline minor, or a 1/2/4/8-byte big-endian argument).
 * The extendedFloat* constants double as the minor values 24..27 here.
 * Assumes ensureSpace() was called by the caller.
 */
function encodeHeader(major, value) {
    if (value < 24) {
        data[cursor++] = (major << 5) | value;
    }
    else if (value < 1 << 8) {
        data[cursor++] = (major << 5) | 24;
        data[cursor++] = value;
    }
    else if (value < 1 << 16) {
        data[cursor++] = (major << 5) | extendedFloat16;
        dataView.setUint16(cursor, value);
        cursor += 2;
    }
    else if (value < 2 ** 32) {
        data[cursor++] = (major << 5) | extendedFloat32;
        dataView.setUint32(cursor, value);
        cursor += 4;
    }
    else {
        data[cursor++] = (major << 5) | extendedFloat64;
        dataView.setBigUint64(cursor, typeof value === "bigint" ? value : BigInt(value));
        cursor += 8;
    }
}
|
||||
export function encode(_input) {
|
||||
const encodeStack = [_input];
|
||||
while (encodeStack.length) {
|
||||
const input = encodeStack.pop();
|
||||
ensureSpace(typeof input === "string" ? input.length * 4 : 64);
|
||||
if (typeof input === "string") {
|
||||
if (USE_BUFFER) {
|
||||
encodeHeader(majorUtf8String, Buffer.byteLength(input));
|
||||
cursor += data.write(input, cursor);
|
||||
}
|
||||
else {
|
||||
const bytes = fromUtf8(input);
|
||||
encodeHeader(majorUtf8String, bytes.byteLength);
|
||||
data.set(bytes, cursor);
|
||||
cursor += bytes.byteLength;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
else if (typeof input === "number") {
|
||||
if (Number.isInteger(input)) {
|
||||
const nonNegative = input >= 0;
|
||||
const major = nonNegative ? majorUint64 : majorNegativeInt64;
|
||||
const value = nonNegative ? input : -input - 1;
|
||||
if (value < 24) {
|
||||
data[cursor++] = (major << 5) | value;
|
||||
}
|
||||
else if (value < 256) {
|
||||
data[cursor++] = (major << 5) | 24;
|
||||
data[cursor++] = value;
|
||||
}
|
||||
else if (value < 65536) {
|
||||
data[cursor++] = (major << 5) | extendedFloat16;
|
||||
data[cursor++] = value >> 8;
|
||||
data[cursor++] = value;
|
||||
}
|
||||
else if (value < 4294967296) {
|
||||
data[cursor++] = (major << 5) | extendedFloat32;
|
||||
dataView.setUint32(cursor, value);
|
||||
cursor += 4;
|
||||
}
|
||||
else {
|
||||
data[cursor++] = (major << 5) | extendedFloat64;
|
||||
dataView.setBigUint64(cursor, BigInt(value));
|
||||
cursor += 8;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
data[cursor++] = (majorSpecial << 5) | extendedFloat64;
|
||||
dataView.setFloat64(cursor, input);
|
||||
cursor += 8;
|
||||
continue;
|
||||
}
|
||||
else if (typeof input === "bigint") {
|
||||
const nonNegative = input >= 0;
|
||||
const major = nonNegative ? majorUint64 : majorNegativeInt64;
|
||||
const value = nonNegative ? input : -input - BigInt(1);
|
||||
const n = Number(value);
|
||||
if (n < 24) {
|
||||
data[cursor++] = (major << 5) | n;
|
||||
}
|
||||
else if (n < 256) {
|
||||
data[cursor++] = (major << 5) | 24;
|
||||
data[cursor++] = n;
|
||||
}
|
||||
else if (n < 65536) {
|
||||
data[cursor++] = (major << 5) | extendedFloat16;
|
||||
data[cursor++] = n >> 8;
|
||||
data[cursor++] = n & 255;
|
||||
}
|
||||
else if (n < 4294967296) {
|
||||
data[cursor++] = (major << 5) | extendedFloat32;
|
||||
dataView.setUint32(cursor, n);
|
||||
cursor += 4;
|
||||
}
|
||||
else if (value < BigInt("18446744073709551616")) {
|
||||
data[cursor++] = (major << 5) | extendedFloat64;
|
||||
dataView.setBigUint64(cursor, value);
|
||||
cursor += 8;
|
||||
}
|
||||
else {
|
||||
const binaryBigInt = value.toString(2);
|
||||
const bigIntBytes = new Uint8Array(Math.ceil(binaryBigInt.length / 8));
|
||||
let b = value;
|
||||
let i = 0;
|
||||
while (bigIntBytes.byteLength - ++i >= 0) {
|
||||
bigIntBytes[bigIntBytes.byteLength - i] = Number(b & BigInt(255));
|
||||
b >>= BigInt(8);
|
||||
}
|
||||
ensureSpace(bigIntBytes.byteLength * 2);
|
||||
data[cursor++] = nonNegative ? 194 : 195;
|
||||
if (USE_BUFFER) {
|
||||
encodeHeader(majorUnstructuredByteString, Buffer.byteLength(bigIntBytes));
|
||||
}
|
||||
else {
|
||||
encodeHeader(majorUnstructuredByteString, bigIntBytes.byteLength);
|
||||
}
|
||||
data.set(bigIntBytes, cursor);
|
||||
cursor += bigIntBytes.byteLength;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
else if (input === null) {
|
||||
data[cursor++] = (majorSpecial << 5) | specialNull;
|
||||
continue;
|
||||
}
|
||||
else if (typeof input === "boolean") {
|
||||
data[cursor++] = (majorSpecial << 5) | (input ? specialTrue : specialFalse);
|
||||
continue;
|
||||
}
|
||||
else if (typeof input === "undefined") {
|
||||
throw new Error("@smithy/core/cbor: client may not serialize undefined value.");
|
||||
}
|
||||
else if (Array.isArray(input)) {
|
||||
for (let i = input.length - 1; i >= 0; --i) {
|
||||
encodeStack.push(input[i]);
|
||||
}
|
||||
encodeHeader(majorList, input.length);
|
||||
continue;
|
||||
}
|
||||
else if (typeof input.byteLength === "number") {
|
||||
ensureSpace(input.length * 2);
|
||||
encodeHeader(majorUnstructuredByteString, input.length);
|
||||
data.set(input, cursor);
|
||||
cursor += input.byteLength;
|
||||
continue;
|
||||
}
|
||||
else if (typeof input === "object") {
|
||||
if (input instanceof NumericValue) {
|
||||
const decimalIndex = input.string.indexOf(".");
|
||||
const exponent = decimalIndex === -1 ? 0 : decimalIndex - input.string.length + 1;
|
||||
const mantissa = BigInt(input.string.replace(".", ""));
|
||||
data[cursor++] = 196;
|
||||
encodeStack.push(mantissa);
|
||||
encodeStack.push(exponent);
|
||||
encodeHeader(majorList, 2);
|
||||
continue;
|
||||
}
|
||||
if (input[tagSymbol]) {
|
||||
if ("tag" in input && "value" in input) {
|
||||
encodeStack.push(input.value);
|
||||
encodeHeader(majorTag, input.tag);
|
||||
continue;
|
||||
}
|
||||
else {
|
||||
throw new Error("tag encountered with missing fields, need 'tag' and 'value', found: " + JSON.stringify(input));
|
||||
}
|
||||
}
|
||||
const keys = Object.keys(input);
|
||||
for (let i = keys.length - 1; i >= 0; --i) {
|
||||
const key = keys[i];
|
||||
encodeStack.push(input[key]);
|
||||
encodeStack.push(key);
|
||||
}
|
||||
encodeHeader(majorMap, keys.length);
|
||||
continue;
|
||||
}
|
||||
throw new Error(`data type ${input?.constructor?.name ?? typeof input} not compatible for encoding.`);
|
||||
}
|
||||
}
|
||||
Generated
Vendored
+25
@@ -0,0 +1,25 @@
|
||||
// CBOR major types (RFC 8949 §3.1) — the high 3 bits of an item's initial byte.
export const majorUint64 = 0;
export const majorNegativeInt64 = 1;
export const majorUnstructuredByteString = 2;
export const majorUtf8String = 3;
export const majorList = 4;
export const majorMap = 5;
export const majorTag = 6;
export const majorSpecial = 7;
// Simple values carried under major type 7 (RFC 8949 §3.3).
export const specialFalse = 20;
export const specialTrue = 21;
export const specialNull = 22;
export const specialUndefined = 23;
// "Additional information" values selecting how many bytes of argument
// follow the initial byte; 25/26/27 double as the half/single/double
// precision float markers under major type 7.
export const extendedOneByte = 24;
export const extendedFloat16 = 25;
export const extendedFloat32 = 26;
export const extendedFloat64 = 27;
// Additional-information value marking indefinite-length items.
export const minorIndefinite = 31;
|
||||
/**
 * Allocates a zero-filled byte buffer of the requested size.
 * Prefers Node's Buffer when it exists; otherwise falls back to Uint8Array.
 */
export function alloc(size) {
  if (typeof Buffer === "undefined") {
    return new Uint8Array(size);
  }
  return Buffer.alloc(size);
}
|
||||
/** Marker symbol identifying an object as an explicit CBOR tag wrapper. */
export const tagSymbol = Symbol("@smithy/core/cbor::tagSymbol");

/**
 * Brands the given { tag, value } object so the encoder writes it as a
 * CBOR tagged item. Mutates and returns the same object.
 */
export function tag(data) {
  Object.assign(data, { [tagSymbol]: true });
  return data;
}
|
||||
+21
@@ -0,0 +1,21 @@
|
||||
import { decode, setPayload } from "./cbor-decode";
|
||||
import { encode, resize, toUint8Array } from "./cbor-encode";
|
||||
/**
 * Public CBOR facade over the module-level streaming encode/decode state.
 */
export const cbor = {
  /** Decodes an entire CBOR payload into JavaScript data. */
  deserialize(payload) {
    setPayload(payload);
    return decode(0, payload.length);
  },
  /**
   * Encodes input into CBOR bytes. If encoding throws, the shared output
   * buffer is still drained (toUint8Array) so the next call starts clean,
   * then the error is rethrown.
   */
  serialize(input) {
    try {
      encode(input);
      return toUint8Array();
    } catch (e) {
      toUint8Array();
      throw e;
    }
  },
  /** Pre-sizes the shared encoding buffer. */
  resizeEncodingBuffer(size) {
    resize(size);
  },
};
|
||||
+5
@@ -0,0 +1,5 @@
|
||||
export { cbor } from "./cbor";
|
||||
export { tag, tagSymbol } from "./cbor-types";
|
||||
export * from "./parseCborBody";
|
||||
export * from "./SmithyRpcV2CborProtocol";
|
||||
export * from "./CborCodec";
|
||||
Generated
Vendored
+86
@@ -0,0 +1,86 @@
|
||||
import { collectBody } from "@smithy/core/protocols";
|
||||
import { HttpRequest as __HttpRequest } from "@smithy/protocol-http";
|
||||
import { calculateBodyLength } from "@smithy/util-body-length-browser";
|
||||
import { cbor } from "./cbor";
|
||||
import { tag, tagSymbol } from "./cbor-types";
|
||||
/**
 * Collects a response stream and CBOR-decodes it. An empty body yields {}.
 * On decode failure the raw body text is attached to the error as
 * $responseBodyText to aid downstream error-shape diagnostics.
 */
export const parseCborBody = async (streamBody, context) => {
  const bytes = await collectBody(streamBody, context);
  if (!bytes.length) {
    return {};
  }
  try {
    return cbor.deserialize(bytes);
  } catch (e) {
    Object.defineProperty(e, "$responseBodyText", {
      value: context.utf8Encoder(bytes),
    });
    throw e;
  }
};
|
||||
/**
 * Wraps a Date as CBOR tag 1 (epoch-based time), expressed in seconds.
 */
export const dateToTag = (date) => {
  const epochSeconds = date.getTime() / 1000;
  return tag({
    tag: 1,
    value: epochSeconds,
  });
};
|
||||
/**
 * Decodes a CBOR error body, normalizing the message casing: some services
 * send "Message" rather than "message".
 */
export const parseCborErrorBody = async (errorBody, context) => {
  const value = await parseCborBody(errorBody, context);
  if (value.message == null) {
    value.message = value.Message;
  }
  return value;
};
|
||||
/**
 * Extracts an error code from a decoded RPCv2 CBOR error payload.
 * Prefers the "__type" field, then any case-insensitive "code" key.
 * Returns undefined when no code can be found.
 */
export const loadSmithyRpcV2CborErrorCode = (output, data) => {
  // Strip trailer segments (",..." then ":...") and a leading
  // "namespace#" prefix from the raw code value.
  const sanitizeErrorCode = (rawValue) => {
    let code = typeof rawValue === "number" ? rawValue.toString() : rawValue;
    for (const separator of [",", ":"]) {
      if (code.indexOf(separator) >= 0) {
        code = code.split(separator)[0];
      }
    }
    if (code.indexOf("#") >= 0) {
      code = code.split("#")[1];
    }
    return code;
  };
  if (data["__type"] !== undefined) {
    return sanitizeErrorCode(data["__type"]);
  }
  const codeKey = Object.keys(data).find((key) => key.toLowerCase() === "code");
  if (codeKey !== undefined && data[codeKey] !== undefined) {
    return sanitizeErrorCode(data[codeKey]);
  }
  return undefined;
};
|
||||
/**
 * Validates that a response carries the smithy-protocol: rpc-v2-cbor
 * header (case-insensitively); throws otherwise.
 */
export const checkCborResponse = (response) => {
  const protocolHeader = String(response.headers["smithy-protocol"]).toLowerCase();
  if (protocolHeader !== "rpc-v2-cbor") {
    throw new Error("Malformed RPCv2 CBOR response, status: " + response.statusCode);
  }
};
|
||||
/**
 * Builds a POST HttpRequest against the resolved endpoint, joining the
 * endpoint base path with the operation path and attaching the body plus a
 * best-effort content-length header.
 */
export const buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => {
  const endpoint = await context.endpoint();
  const { hostname, protocol = "https", port, path: basePath } = endpoint;
  const joinedPath = basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path;
  const contents = {
    protocol,
    hostname: resolvedHostname !== undefined ? resolvedHostname : hostname,
    port,
    method: "POST",
    path: joinedPath,
    headers: { ...headers },
  };
  if (body !== undefined) {
    contents.body = body;
    try {
      contents.headers["content-length"] = String(calculateBodyLength(body));
    } catch (e) {
      // length is best-effort; some stream types cannot be measured.
    }
  }
  return new __HttpRequest(contents);
};
|
||||
Generated
Vendored
+178
@@ -0,0 +1,178 @@
|
||||
import { NormalizedSchema, SCHEMA } from "@smithy/core/schema";
|
||||
import { HttpRequest } from "@smithy/protocol-http";
|
||||
import { collectBody } from "./collect-stream-body";
|
||||
import { extendedEncodeURIComponent } from "./extended-encode-uri-component";
|
||||
import { HttpProtocol } from "./HttpProtocol";
|
||||
/**
 * Protocol base for operations whose members are bound to HTTP request /
 * response components via traits (httpLabel, httpHeader, httpQuery,
 * httpPrefixHeaders, httpPayload). Unbound members go into the body.
 */
export class HttpBindingProtocol extends HttpProtocol {
    /**
     * Serializes an operation input into an HttpRequest by applying each
     * member's HTTP binding trait; members with no binding are collected
     * and written into the body as a single document at the end.
     */
    async serializeRequest(operationSchema, input, context) {
        const serializer = this.serializer;
        const query = {};
        const headers = {};
        const endpoint = await context.endpoint();
        const ns = NormalizedSchema.of(operationSchema?.input);
        const schema = ns.getSchema();
        // set when at least one member lacks an HTTP binding trait.
        let hasNonHttpBindingMember = false;
        let payload;
        const request = new HttpRequest({
            protocol: "",
            hostname: "",
            port: undefined,
            path: "",
            fragment: undefined,
            query: query,
            headers: headers,
            body: undefined,
        });
        if (endpoint) {
            this.updateServiceEndpoint(request, endpoint);
            this.setHostPrefix(request, operationSchema, input);
            const opTraits = NormalizedSchema.translateTraits(operationSchema.traits);
            if (opTraits.http) {
                // http trait tuple: [method, uriPattern, ...].
                request.method = opTraits.http[0];
                const [path, search] = opTraits.http[1].split("?");
                if (request.path == "/") {
                    request.path = path;
                }
                else {
                    request.path += path;
                }
                // static query params baked into the uri pattern.
                const traitSearchParams = new URLSearchParams(search ?? "");
                Object.assign(query, Object.fromEntries(traitSearchParams));
            }
        }
        // shallow copy so bound members can be deleted, leaving only
        // body-bound members behind for the final document write.
        const _input = {
            ...input,
        };
        for (const memberName of Object.keys(_input)) {
            const memberNs = ns.getMemberSchema(memberName);
            if (memberNs === undefined) {
                // not a modeled member; ignore.
                continue;
            }
            const memberTraits = memberNs.getMergedTraits();
            const inputMember = _input[memberName];
            if (memberTraits.httpPayload) {
                const isStreaming = memberNs.isStreaming();
                if (isStreaming) {
                    const isEventStream = memberNs.isStructSchema();
                    if (isEventStream) {
                        throw new Error("serialization of event streams is not yet implemented");
                    }
                    else {
                        // streaming blob: pass through unmodified.
                        payload = inputMember;
                    }
                }
                else {
                    serializer.write(memberNs, inputMember);
                    payload = serializer.flush();
                }
            }
            else if (memberTraits.httpLabel) {
                serializer.write(memberNs, inputMember);
                const replacement = serializer.flush();
                // greedy labels ({name+}) keep "/" separators; each
                // segment is URI-encoded individually.
                if (request.path.includes(`{${memberName}+}`)) {
                    request.path = request.path.replace(`{${memberName}+}`, replacement.split("/").map(extendedEncodeURIComponent).join("/"));
                }
                else if (request.path.includes(`{${memberName}}`)) {
                    request.path = request.path.replace(`{${memberName}}`, extendedEncodeURIComponent(replacement));
                }
                delete _input[memberName];
            }
            else if (memberTraits.httpHeader) {
                serializer.write(memberNs, inputMember);
                headers[memberTraits.httpHeader.toLowerCase()] = String(serializer.flush());
                delete _input[memberName];
            }
            else if (typeof memberTraits.httpPrefixHeaders === "string") {
                // map member: each entry becomes a "<prefix><key>" header.
                for (const [key, val] of Object.entries(inputMember)) {
                    const amalgam = memberTraits.httpPrefixHeaders + key;
                    serializer.write([memberNs.getValueSchema(), { httpHeader: amalgam }], val);
                    headers[amalgam.toLowerCase()] = serializer.flush();
                }
                delete _input[memberName];
            }
            else if (memberTraits.httpQuery || memberTraits.httpQueryParams) {
                this.serializeQuery(memberNs, inputMember, query);
                delete _input[memberName];
            }
            else {
                hasNonHttpBindingMember = true;
            }
        }
        if (hasNonHttpBindingMember && input) {
            // remaining (unbound) members form the document body.
            serializer.write(schema, _input);
            payload = serializer.flush();
        }
        request.headers = headers;
        request.query = query;
        request.body = payload;
        return request;
    }
    /**
     * Writes a member bound with httpQuery/httpQueryParams into the query
     * bag. httpQueryParams expands each map key into its own parameter,
     * never overwriting a key already set by an explicit binding.
     */
    serializeQuery(ns, data, query) {
        const serializer = this.serializer;
        const traits = ns.getMergedTraits();
        if (traits.httpQueryParams) {
            for (const [key, val] of Object.entries(data)) {
                if (!(key in query)) {
                    // recurse with a synthetic httpQuery trait per key.
                    this.serializeQuery(NormalizedSchema.of([
                        ns.getValueSchema(),
                        {
                            ...traits,
                            httpQuery: key,
                            httpQueryParams: undefined,
                        },
                    ]), val, query);
                }
            }
            return;
        }
        if (ns.isListSchema()) {
            const sparse = !!ns.getMergedTraits().sparse;
            const buffer = [];
            for (const item of data) {
                serializer.write([ns.getValueSchema(), traits], item);
                const serializable = serializer.flush();
                // non-sparse lists drop undefined entries.
                if (sparse || serializable !== undefined) {
                    buffer.push(serializable);
                }
            }
            query[traits.httpQuery] = buffer;
        }
        else {
            serializer.write([ns, traits], data);
            query[traits.httpQuery] = serializer.flush();
        }
    }
    /**
     * Deserializes an HttpResponse. Status >= 300 is routed through
     * handleError (which must throw). Otherwise header/status bindings are
     * read first; any members left unbound are then read from the body.
     */
    async deserializeResponse(operationSchema, context, response) {
        const deserializer = this.deserializer;
        const ns = NormalizedSchema.of(operationSchema.output);
        const dataObject = {};
        if (response.statusCode >= 300) {
            const bytes = await collectBody(response.body, context);
            if (bytes.byteLength > 0) {
                Object.assign(dataObject, await deserializer.read(SCHEMA.DOCUMENT, bytes));
            }
            await this.handleError(operationSchema, context, response, dataObject, this.deserializeMetadata(response));
            throw new Error("@smithy/core/protocols - HTTP Protocol error handler failed to throw.");
        }
        // normalize header keys to lowercase for case-insensitive lookup.
        for (const header in response.headers) {
            const value = response.headers[header];
            delete response.headers[header];
            response.headers[header.toLowerCase()] = value;
        }
        const nonHttpBindingMembers = await this.deserializeHttpMessage(ns, context, response, dataObject);
        if (nonHttpBindingMembers.length) {
            const bytes = await collectBody(response.body, context);
            if (bytes.byteLength > 0) {
                const dataFromBody = await deserializer.read(ns, bytes);
                for (const member of nonHttpBindingMembers) {
                    dataObject[member] = dataFromBody[member];
                }
            }
        }
        const output = {
            $metadata: this.deserializeMetadata(response),
            ...dataObject,
        };
        return output;
    }
}
|
||||
Generated
Vendored
+171
@@ -0,0 +1,171 @@
|
||||
import { NormalizedSchema, SCHEMA } from "@smithy/core/schema";
|
||||
import { splitEvery, splitHeader } from "@smithy/core/serde";
|
||||
import { HttpRequest, HttpResponse } from "@smithy/protocol-http";
|
||||
import { sdkStreamMixin } from "@smithy/util-stream";
|
||||
import { collectBody } from "./collect-stream-body";
|
||||
/**
 * Abstract base for HTTP-based protocol implementations. Subclasses supply
 * this.serializer / this.deserializer and handleError; this class provides
 * endpoint handling, metadata extraction, and HTTP-message deserialization.
 */
export class HttpProtocol {
    constructor(options) {
        this.options = options;
    }
    getRequestType() {
        return HttpRequest;
    }
    getResponseType() {
        return HttpResponse;
    }
    /** Propagates the serde context to both codecs and the payload codec. */
    setSerdeContext(serdeContext) {
        this.serdeContext = serdeContext;
        this.serializer.setSerdeContext(serdeContext);
        this.deserializer.setSerdeContext(serdeContext);
        if (this.getPayloadCodec()) {
            this.getPayloadCodec().setSerdeContext(serdeContext);
        }
    }
    /**
     * Copies endpoint components onto the request. Accepts either a
     * URL-bearing endpoint ({ url }) or a flat endpoint object.
     */
    updateServiceEndpoint(request, endpoint) {
        if ("url" in endpoint) {
            request.protocol = endpoint.url.protocol;
            request.hostname = endpoint.url.hostname;
            request.port = endpoint.url.port ? Number(endpoint.url.port) : undefined;
            request.path = endpoint.url.pathname;
            request.fragment = endpoint.url.hash || void 0;
            request.username = endpoint.url.username || void 0;
            request.password = endpoint.url.password || void 0;
            for (const [k, v] of endpoint.url.searchParams.entries()) {
                if (!request.query) {
                    request.query = {};
                }
                request.query[k] = v;
            }
            return request;
        }
        else {
            request.protocol = endpoint.protocol;
            request.hostname = endpoint.hostname;
            request.port = endpoint.port ? Number(endpoint.port) : undefined;
            request.path = endpoint.path;
            request.query = {
                ...endpoint.query,
            };
            return request;
        }
    }
    /**
     * Applies the operation's endpoint-trait host prefix (with {label}
     * substitution from hostLabel input members) to the request hostname.
     */
    setHostPrefix(request, operationSchema, input) {
        const operationNs = NormalizedSchema.of(operationSchema);
        const inputNs = NormalizedSchema.of(operationSchema.input);
        if (operationNs.getMergedTraits().endpoint) {
            let hostPrefix = operationNs.getMergedTraits().endpoint?.[0];
            if (typeof hostPrefix === "string") {
                const hostLabelInputs = [...inputNs.structIterator()].filter(([, member]) => member.getMergedTraits().hostLabel);
                for (const [name] of hostLabelInputs) {
                    const replacement = input[name];
                    if (typeof replacement !== "string") {
                        throw new Error(`@smithy/core/schema - ${name} in input must be a string as hostLabel.`);
                    }
                    hostPrefix = hostPrefix.replace(`{${name}}`, replacement);
                }
                request.hostname = hostPrefix + request.hostname;
            }
        }
    }
    /** Builds the $metadata object from a response's status and headers. */
    deserializeMetadata(output) {
        return {
            httpStatusCode: output.statusCode,
            requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"],
            extendedRequestId: output.headers["x-amz-id-2"],
            cfId: output.headers["x-amz-cf-id"],
        };
    }
    /**
     * Reads HTTP-bound members (payload, headers, prefix headers, response
     * code) from the response into dataObject, returning the names of
     * members with no HTTP binding (to be read from the body by callers).
     * arg4/arg5 form a legacy overload: (..., dataObject) or
     * (..., httpBindingsSet, dataObject) — presumably for back-compat;
     * only dataObject is used here.
     */
    async deserializeHttpMessage(schema, context, response, arg4, arg5) {
        let dataObject;
        if (arg4 instanceof Set) {
            dataObject = arg5;
        }
        else {
            dataObject = arg4;
        }
        const deserializer = this.deserializer;
        const ns = NormalizedSchema.of(schema);
        const nonHttpBindingMembers = [];
        for (const [memberName, memberSchema] of ns.structIterator()) {
            const memberTraits = memberSchema.getMemberTraits();
            if (memberTraits.httpPayload) {
                const isStreaming = memberSchema.isStreaming();
                if (isStreaming) {
                    const isEventStream = memberSchema.isStructSchema();
                    if (isEventStream) {
                        // event stream: decode each event's member via the
                        // union member schemas; unknown members pass through
                        // under $unknown.
                        const context = this.serdeContext;
                        if (!context.eventStreamMarshaller) {
                            throw new Error("@smithy/core - HttpProtocol: eventStreamMarshaller missing in serdeContext.");
                        }
                        const memberSchemas = memberSchema.getMemberSchemas();
                        dataObject[memberName] = context.eventStreamMarshaller.deserialize(response.body, async (event) => {
                            const unionMember = Object.keys(event).find((key) => {
                                return key !== "__type";
                            }) ?? "";
                            if (unionMember in memberSchemas) {
                                const eventStreamSchema = memberSchemas[unionMember];
                                return {
                                    [unionMember]: await deserializer.read(eventStreamSchema, event[unionMember].body),
                                };
                            }
                            else {
                                return {
                                    $unknown: event,
                                };
                            }
                        });
                    }
                    else {
                        // streaming blob: expose the stream with sdk mixins.
                        dataObject[memberName] = sdkStreamMixin(response.body);
                    }
                }
                else if (response.body) {
                    const bytes = await collectBody(response.body, context);
                    if (bytes.byteLength > 0) {
                        dataObject[memberName] = await deserializer.read(memberSchema, bytes);
                    }
                }
            }
            else if (memberTraits.httpHeader) {
                const key = String(memberTraits.httpHeader).toLowerCase();
                const value = response.headers[key];
                if (null != value) {
                    if (memberSchema.isListSchema()) {
                        const headerListValueSchema = memberSchema.getValueSchema();
                        let sections;
                        // http-date timestamps contain a comma, so only the
                        // first comma can split list entries.
                        if (headerListValueSchema.isTimestampSchema() &&
                            headerListValueSchema.getSchema() === SCHEMA.TIMESTAMP_DEFAULT) {
                            sections = splitEvery(value, ",", 2);
                        }
                        else {
                            sections = splitHeader(value);
                        }
                        const list = [];
                        for (const section of sections) {
                            list.push(await deserializer.read([headerListValueSchema, { httpHeader: key }], section.trim()));
                        }
                        dataObject[memberName] = list;
                    }
                    else {
                        dataObject[memberName] = await deserializer.read(memberSchema, value);
                    }
                }
            }
            else if (memberTraits.httpPrefixHeaders !== undefined) {
                // collect every header sharing the prefix into a map keyed
                // by the suffix after the prefix.
                dataObject[memberName] = {};
                for (const [header, value] of Object.entries(response.headers)) {
                    if (header.startsWith(memberTraits.httpPrefixHeaders)) {
                        dataObject[memberName][header.slice(memberTraits.httpPrefixHeaders.length)] = await deserializer.read([memberSchema.getValueSchema(), { httpHeader: header }], value);
                    }
                }
            }
            else if (memberTraits.httpResponseCode) {
                dataObject[memberName] = response.statusCode;
            }
            else {
                nonHttpBindingMembers.push(memberName);
            }
        }
        return nonHttpBindingMembers;
    }
}
|
||||
Generated
Vendored
+68
@@ -0,0 +1,68 @@
|
||||
import { NormalizedSchema, SCHEMA } from "@smithy/core/schema";
|
||||
import { HttpRequest } from "@smithy/protocol-http";
|
||||
import { collectBody } from "./collect-stream-body";
|
||||
import { HttpProtocol } from "./HttpProtocol";
|
||||
/**
 * Protocol base for RPC-style protocols: the entire input document travels
 * in a POST body and the entire output document is read from the response
 * body; no per-member HTTP bindings apply.
 */
export class RpcProtocol extends HttpProtocol {
    /**
     * Builds the POST request for an operation, serializing the full input
     * (when present) into the body.
     */
    async serializeRequest(operationSchema, input, context) {
        const query = {};
        const headers = {};
        const request = new HttpRequest({
            protocol: "",
            hostname: "",
            port: undefined,
            path: "/",
            fragment: undefined,
            query: query,
            headers: headers,
            body: undefined,
        });
        const endpoint = await context.endpoint();
        if (endpoint) {
            this.updateServiceEndpoint(request, endpoint);
            this.setHostPrefix(request, operationSchema, input);
        }
        const inputNs = NormalizedSchema.of(operationSchema?.input);
        let payload;
        if (input) {
            this.serializer.write(inputNs.getSchema(), { ...input });
            payload = this.serializer.flush();
        }
        request.headers = headers;
        request.query = query;
        request.body = payload;
        request.method = "POST";
        return request;
    }
    /**
     * Deserializes a response: status >= 300 is routed through handleError
     * (which must throw); otherwise the body document is decoded and merged
     * with $metadata.
     */
    async deserializeResponse(operationSchema, context, response) {
        const deserializer = this.deserializer;
        const outputNs = NormalizedSchema.of(operationSchema.output);
        const dataObject = {};
        if (response.statusCode >= 300) {
            const errorBytes = await collectBody(response.body, context);
            if (errorBytes.byteLength > 0) {
                Object.assign(dataObject, await deserializer.read(SCHEMA.DOCUMENT, errorBytes));
            }
            await this.handleError(operationSchema, context, response, dataObject, this.deserializeMetadata(response));
            throw new Error("@smithy/core/protocols - RPC Protocol error handler failed to throw.");
        }
        // normalize header keys to lowercase for case-insensitive lookup.
        for (const [headerName, headerValue] of Object.entries(response.headers)) {
            delete response.headers[headerName];
            response.headers[headerName.toLowerCase()] = headerValue;
        }
        const bytes = await collectBody(response.body, context);
        if (bytes.byteLength > 0) {
            Object.assign(dataObject, await deserializer.read(outputNs, bytes));
        }
        return {
            $metadata: this.deserializeMetadata(response),
            ...dataObject,
        };
    }
}
|
||||
Generated
Vendored
+11
@@ -0,0 +1,11 @@
|
||||
import { Uint8ArrayBlobAdapter } from "@smithy/util-stream";
|
||||
/**
 * Collects a response body into a Uint8Array-backed blob adapter.
 * Already-materialized Uint8Arrays pass through; nullish bodies become an
 * empty array; anything else is drained via the context's streamCollector.
 */
export const collectBody = async (streamBody = new Uint8Array(), context) => {
  if (streamBody instanceof Uint8Array) {
    return Uint8ArrayBlobAdapter.mutate(streamBody);
  }
  if (!streamBody) {
    return Uint8ArrayBlobAdapter.mutate(new Uint8Array());
  }
  const collected = await context.streamCollector(streamBody);
  return Uint8ArrayBlobAdapter.mutate(collected);
};
|
||||
Generated
Vendored
+5
@@ -0,0 +1,5 @@
|
||||
/**
 * Like encodeURIComponent, but additionally percent-encodes ! ' ( ) *,
 * which RFC 3986 reserves yet encodeURIComponent leaves unescaped.
 */
export function extendedEncodeURIComponent(str) {
  return encodeURIComponent(str).replace(/[!'()*]/g, (char) => {
    const hex = char.charCodeAt(0).toString(16).toUpperCase();
    return `%${hex}`;
  });
}
|
||||
Generated
Vendored
+11
@@ -0,0 +1,11 @@
|
||||
export * from "./collect-stream-body";
|
||||
export * from "./extended-encode-uri-component";
|
||||
export * from "./HttpBindingProtocol";
|
||||
export * from "./RpcProtocol";
|
||||
export * from "./requestBuilder";
|
||||
export * from "./resolve-path";
|
||||
export * from "./serde/FromStringShapeDeserializer";
|
||||
export * from "./serde/HttpInterceptingShapeDeserializer";
|
||||
export * from "./serde/HttpInterceptingShapeSerializer";
|
||||
export * from "./serde/ToStringShapeSerializer";
|
||||
export * from "./serde/determineTimestampFormat";
|
||||
Generated
Vendored
+67
@@ -0,0 +1,67 @@
|
||||
import { HttpRequest } from "@smithy/protocol-http";
|
||||
import { resolvedPath } from "./resolve-path";
|
||||
/** Convenience factory for RequestBuilder. */
export function requestBuilder(input, context) {
  return new RequestBuilder(input, context);
}

/**
 * Fluent builder assembling an HttpRequest from chained setters
 * (hn/bp/p/h/q/b/m). Path resolution is deferred: bp() and p() push
 * closures that run in order during build().
 */
export class RequestBuilder {
  constructor(input, context) {
    this.input = input;
    this.context = context;
    this.query = {};
    this.method = "";
    this.headers = {};
    this.path = "";
    this.body = null;
    this.hostname = "";
    this.resolvePathStack = [];
  }
  /** Resolves the endpoint, runs queued path resolvers, and builds the request. */
  async build() {
    const endpoint = await this.context.endpoint();
    const { hostname, protocol = "https", port, path: basePath } = endpoint;
    this.path = basePath;
    for (const resolve of this.resolvePathStack) {
      resolve(this.path);
    }
    return new HttpRequest({
      protocol,
      hostname: this.hostname || hostname,
      port,
      method: this.method,
      path: this.path,
      query: this.query,
      body: this.body,
      headers: this.headers,
    });
  }
  /** Sets an explicit hostname override. */
  hn(hostname) {
    this.hostname = hostname;
    return this;
  }
  /** Queues a base-path join (deduplicating a trailing "/"). */
  bp(uriLabel) {
    this.resolvePathStack.push((basePath) => {
      const trimmed = basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || "";
      this.path = `${trimmed}` + uriLabel;
    });
    return this;
  }
  /** Queues substitution of a {label} from the input into the path. */
  p(memberName, labelValueProvider, uriLabel, isGreedyLabel) {
    this.resolvePathStack.push((path) => {
      this.path = resolvedPath(path, this.input, memberName, labelValueProvider, uriLabel, isGreedyLabel);
    });
    return this;
  }
  /** Sets the request headers. */
  h(headers) {
    this.headers = headers;
    return this;
  }
  /** Sets the query bag. */
  q(query) {
    this.query = query;
    return this;
  }
  /** Sets the request body. */
  b(body) {
    this.body = body;
    return this;
  }
  /** Sets the HTTP method. */
  m(method) {
    this.method = method;
    return this;
  }
}
|
||||
Generated
Vendored
+19
@@ -0,0 +1,19 @@
|
||||
import { extendedEncodeURIComponent } from "./extended-encode-uri-component";
|
||||
/**
 * Substitutes one URI label in the path with the encoded member value.
 * Greedy labels keep "/" separators, encoding each segment individually.
 * Throws when the member is missing or serializes to an empty string.
 */
export const resolvedPath = (resolvedPath, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => {
  if (input == null || input[memberName] === undefined) {
    throw new Error("No value provided for input HTTP label: " + memberName + ".");
  }
  const labelValue = labelValueProvider();
  if (labelValue.length <= 0) {
    throw new Error("Empty value provided for input HTTP label: " + memberName + ".");
  }
  const encoded = isGreedyLabel
    ? labelValue.split("/").map((segment) => extendedEncodeURIComponent(segment)).join("/")
    : extendedEncodeURIComponent(labelValue);
  return resolvedPath.replace(uriLabel, encoded);
};
|
||||
Generated
Vendored
+64
@@ -0,0 +1,64 @@
|
||||
import { NormalizedSchema, SCHEMA } from "@smithy/core/schema";
|
||||
import { LazyJsonString, NumericValue, parseEpochTimestamp, parseRfc3339DateTimeWithOffset, parseRfc7231DateTime, splitHeader, } from "@smithy/core/serde";
|
||||
import { fromBase64 } from "@smithy/util-base64";
|
||||
import { toUtf8 } from "@smithy/util-utf8";
|
||||
import { determineTimestampFormat } from "./determineTimestampFormat";
|
||||
/**
 * Deserializes string-carried values (headers, query, labels) into the
 * runtime representation dictated by the schema.
 */
export class FromStringShapeDeserializer {
    constructor(settings) {
        this.settings = settings;
    }
    setSerdeContext(serdeContext) {
        this.serdeContext = serdeContext;
    }
    /** Converts a single string datum per the (normalized) schema. */
    read(_schema, data) {
        const ns = NormalizedSchema.of(_schema);
        if (ns.isListSchema()) {
            // delimited header list: deserialize each element recursively.
            return splitHeader(data).map((item) => this.read(ns.getValueSchema(), item));
        }
        if (ns.isBlobSchema()) {
            return (this.serdeContext?.base64Decoder ?? fromBase64)(data);
        }
        if (ns.isTimestampSchema()) {
            const format = determineTimestampFormat(ns, this.settings);
            if (format === SCHEMA.TIMESTAMP_DATE_TIME) {
                return parseRfc3339DateTimeWithOffset(data);
            }
            if (format === SCHEMA.TIMESTAMP_HTTP_DATE) {
                return parseRfc7231DateTime(data);
            }
            if (format === SCHEMA.TIMESTAMP_EPOCH_SECONDS) {
                return parseEpochTimestamp(data);
            }
            console.warn("Missing timestamp format, parsing value with Date constructor:", data);
            return new Date(data);
        }
        if (ns.isStringSchema()) {
            const mediaType = ns.getMergedTraits().mediaType;
            let value = data;
            if (mediaType) {
                if (ns.getMergedTraits().httpHeader) {
                    // media-typed header values arrive base64-encoded.
                    value = this.base64ToUtf8(value);
                }
                const isJson = mediaType === "application/json" || mediaType.endsWith("+json");
                if (isJson) {
                    value = LazyJsonString.from(value);
                }
                return value;
            }
        }
        if (ns.isNumericSchema()) {
            return Number(data);
        }
        if (ns.isBigIntegerSchema()) {
            return BigInt(data);
        }
        if (ns.isBigDecimalSchema()) {
            return new NumericValue(data, "bigDecimal");
        }
        if (ns.isBooleanSchema()) {
            return String(data).toLowerCase() === "true";
        }
        return data;
    }
    /** Decodes base64 then re-encodes the bytes as a UTF-8 string. */
    base64ToUtf8(base64String) {
        const decodeBase64 = this.serdeContext?.base64Decoder ?? fromBase64;
        const encodeUtf8 = this.serdeContext?.utf8Encoder ?? toUtf8;
        return encodeUtf8(decodeBase64(base64String));
    }
}
|
||||
Generated
Vendored
+38
@@ -0,0 +1,38 @@
|
||||
import { NormalizedSchema } from "@smithy/core/schema";
|
||||
import { fromUtf8, toUtf8 } from "@smithy/util-utf8";
|
||||
import { FromStringShapeDeserializer } from "./FromStringShapeDeserializer";
|
||||
/**
 * Deserializer that routes HTTP-bound values (headers, response code,
 * payloads) to string/byte handling and delegates everything else to the
 * protocol's codec deserializer.
 */
export class HttpInterceptingShapeDeserializer {
    constructor(codecDeserializer, codecSettings) {
        this.codecDeserializer = codecDeserializer;
        this.stringDeserializer = new FromStringShapeDeserializer(codecSettings);
    }
    setSerdeContext(serdeContext) {
        this.stringDeserializer.setSerdeContext(serdeContext);
        this.codecDeserializer.setSerdeContext(serdeContext);
        this.serdeContext = serdeContext;
    }
    read(schema, data) {
        const ns = NormalizedSchema.of(schema);
        const traits = ns.getMergedTraits();
        const toString = this.serdeContext?.utf8Encoder ?? toUtf8;
        if (traits.httpHeader || traits.httpResponseCode) {
            // header/status values are strings; delegate to string handling.
            return this.stringDeserializer.read(ns, toString(data));
        }
        if (traits.httpPayload) {
            if (ns.isBlobSchema()) {
                // blob payload: normalize strings to bytes, pass bytes as-is.
                const toBytes = this.serdeContext?.utf8Decoder ?? fromUtf8;
                return typeof data === "string" ? toBytes(data) : data;
            }
            if (ns.isStringSchema()) {
                // string payload: normalize byte-like input to a string.
                return "byteLength" in data ? toString(data) : data;
            }
        }
        return this.codecDeserializer.read(ns, data);
    }
}
|
||||
Generated
Vendored
+30
@@ -0,0 +1,30 @@
|
||||
import { NormalizedSchema } from "@smithy/core/schema";
|
||||
import { ToStringShapeSerializer } from "./ToStringShapeSerializer";
|
||||
/**
 * Serializer that routes HTTP-bound values (headers, labels, query params)
 * to a string serializer, buffering the result, and delegates all other
 * shapes to the wrapped codec serializer.
 */
export class HttpInterceptingShapeSerializer {
  codecSerializer;
  stringSerializer;
  buffer;

  /**
   * @param codecSerializer - protocol codec used for non-HTTP-bound shapes.
   * @param codecSettings - settings used to build the default string serializer.
   * @param stringSerializer - optional override for the string serializer.
   */
  constructor(codecSerializer, codecSettings, stringSerializer = new ToStringShapeSerializer(codecSettings)) {
    this.codecSerializer = codecSerializer;
    this.stringSerializer = stringSerializer;
  }

  /** Propagates the serde context to both delegate serializers. */
  setSerdeContext(serdeContext) {
    this.codecSerializer.setSerdeContext(serdeContext);
    this.stringSerializer.setSerdeContext(serdeContext);
  }

  /**
   * Writes a value. Header/label/query-bound values are stringified and held
   * in an internal buffer until flush(); everything else goes to the codec.
   */
  write(schema, value) {
    const normalized = NormalizedSchema.of(schema);
    const { httpHeader, httpLabel, httpQuery } = normalized.getMergedTraits();
    if (httpHeader || httpLabel || httpQuery) {
      this.stringSerializer.write(normalized, value);
      this.buffer = this.stringSerializer.flush();
      return;
    }
    return this.codecSerializer.write(normalized, value);
  }

  /**
   * Returns and clears the buffered string result if one exists;
   * otherwise flushes the codec serializer.
   */
  flush() {
    const buffered = this.buffer;
    if (buffered !== undefined) {
      this.buffer = undefined;
      return buffered;
    }
    return this.codecSerializer.flush();
  }
}
|
||||
Generated
Vendored
+87
@@ -0,0 +1,87 @@
|
||||
import { NormalizedSchema, SCHEMA } from "@smithy/core/schema";
|
||||
import { dateToUtcString, LazyJsonString, quoteHeader } from "@smithy/core/serde";
|
||||
import { toBase64 } from "@smithy/util-base64";
|
||||
import { determineTimestampFormat } from "./determineTimestampFormat";
|
||||
/**
 * Serializes schema-typed values into strings (used for HTTP headers, labels,
 * and query parameters). Results are accumulated in an internal buffer and
 * retrieved with flush().
 */
export class ToStringShapeSerializer {
  constructor(settings) {
    this.settings = settings;
    // Holds the most recently written string until flush() is called.
    this.stringBuffer = "";
    this.serdeContext = undefined;
  }
  setSerdeContext(serdeContext) {
    this.serdeContext = serdeContext;
  }
  /**
   * Converts `value` to its string form according to the (normalized) schema
   * and stores it in the buffer. Dispatches on the runtime type of `value`.
   */
  write(schema, value) {
    const ns = NormalizedSchema.of(schema);
    switch (typeof value) {
      case "object":
        if (value === null) {
          this.stringBuffer = "null";
          return;
        }
        if (ns.isTimestampSchema()) {
          // Timestamps must already be Date objects at this layer.
          if (!(value instanceof Date)) {
            throw new Error(`@smithy/core/protocols - received non-Date value ${value} when schema expected Date in ${ns.getName(true)}`);
          }
          const format = determineTimestampFormat(ns, this.settings);
          switch (format) {
            case SCHEMA.TIMESTAMP_DATE_TIME:
              // Strip the ".000Z" millisecond suffix for whole-second times.
              this.stringBuffer = value.toISOString().replace(".000Z", "Z");
              break;
            case SCHEMA.TIMESTAMP_HTTP_DATE:
              this.stringBuffer = dateToUtcString(value);
              break;
            case SCHEMA.TIMESTAMP_EPOCH_SECONDS:
              this.stringBuffer = String(value.getTime() / 1000);
              break;
            default:
              // Fallback mirrors the epoch-seconds branch but warns loudly.
              console.warn("Missing timestamp format, using epoch seconds", value);
              this.stringBuffer = String(value.getTime() / 1000);
          }
          return;
        }
        // Blobs ("byteLength" duck-check) are base64-encoded.
        if (ns.isBlobSchema() && "byteLength" in value) {
          this.stringBuffer = (this.serdeContext?.base64Encoder ?? toBase64)(value);
          return;
        }
        if (ns.isListSchema() && Array.isArray(value)) {
          // Serialize each element recursively, then comma-join.
          // Timestamp items are not quoted; other items go through quoteHeader.
          let buffer = "";
          for (const item of value) {
            this.write([ns.getValueSchema(), ns.getMergedTraits()], item);
            const headerItem = this.flush();
            const serialized = ns.getValueSchema().isTimestampSchema() ? headerItem : quoteHeader(headerItem);
            if (buffer !== "") {
              buffer += ", ";
            }
            buffer += serialized;
          }
          this.stringBuffer = buffer;
          return;
        }
        // Any other object: pretty-printed JSON.
        this.stringBuffer = JSON.stringify(value, null, 2);
        break;
      case "string":
        const mediaType = ns.getMergedTraits().mediaType;
        let intermediateValue = value;
        if (mediaType) {
          const isJson = mediaType === "application/json" || mediaType.endsWith("+json");
          if (isJson) {
            // Wrap JSON-media-typed strings so downstream code can treat them lazily.
            intermediateValue = LazyJsonString.from(intermediateValue);
          }
          // Media-typed strings bound to headers are base64-encoded.
          if (ns.getMergedTraits().httpHeader) {
            this.stringBuffer = (this.serdeContext?.base64Encoder ?? toBase64)(intermediateValue.toString());
            return;
          }
        }
        this.stringBuffer = value;
        break;
      default:
        // number, boolean, bigint, undefined, symbol, function.
        this.stringBuffer = String(value);
    }
  }
  /** Returns the buffered string and resets the buffer to empty. */
  flush() {
    const buffer = this.stringBuffer;
    this.stringBuffer = "";
    return buffer;
  }
}
|
||||
Generated
Vendored
+20
@@ -0,0 +1,20 @@
|
||||
import { SCHEMA } from "@smithy/core/schema";
|
||||
/**
 * Resolves which timestamp format to use for a schema.
 *
 * Resolution order:
 *  1. an explicit format on the schema itself (when settings allow trait use),
 *  2. a format implied by the HTTP binding location (when enabled),
 *  3. the protocol default from settings.
 */
export function determineTimestampFormat(ns, settings) {
  // 1. Honor a concrete format declared on the timestamp schema itself.
  if (settings.timestampFormat.useTrait) {
    const explicitFormats = [
      SCHEMA.TIMESTAMP_DATE_TIME,
      SCHEMA.TIMESTAMP_HTTP_DATE,
      SCHEMA.TIMESTAMP_EPOCH_SECONDS,
    ];
    if (ns.isTimestampSchema() && explicitFormats.includes(ns.getSchema())) {
      return ns.getSchema();
    }
  }

  // 2. Derive a format from where the value is bound in the HTTP message.
  const { httpLabel, httpPrefixHeaders, httpHeader, httpQuery } = ns.getMergedTraits();
  if (settings.httpBindings) {
    // Headers (and prefix headers) conventionally use the HTTP-date format.
    if (typeof httpPrefixHeaders === "string" || Boolean(httpHeader)) {
      return SCHEMA.TIMESTAMP_HTTP_DATE;
    }
    // Query strings and URI labels conventionally use date-time.
    if (Boolean(httpQuery) || Boolean(httpLabel)) {
      return SCHEMA.TIMESTAMP_DATE_TIME;
    }
  }

  // 3. Fall back to the configured default.
  return settings.timestampFormat.default;
}
|
||||
Generated
Vendored
+49
@@ -0,0 +1,49 @@
|
||||
/**
 * Per-namespace registry mapping fully qualified shape ids
 * ("namespace#Name") to their schema objects.
 */
export class TypeRegistry {
  /** Global map of namespace -> registry instance. */
  static registries = new Map();

  namespace;
  schemas;

  constructor(namespace, schemas = new Map()) {
    this.namespace = namespace;
    this.schemas = schemas;
  }

  /** Returns the singleton registry for a namespace, creating it on demand. */
  static for(namespace) {
    let registry = TypeRegistry.registries.get(namespace);
    if (registry === undefined) {
      registry = new TypeRegistry(namespace);
      TypeRegistry.registries.set(namespace, registry);
    }
    return registry;
  }

  /**
   * Registers a schema under its qualified name, in the registry belonging to
   * the shape id's namespace (which may differ from this registry's own).
   */
  register(shapeId, schema) {
    const qualifiedName = this.normalizeShapeId(shapeId);
    TypeRegistry.for(this.getNamespace(shapeId)).schemas.set(qualifiedName, schema);
  }

  /**
   * Looks up a schema by (possibly unqualified) shape id.
   * @throws Error when no schema is registered under that id.
   */
  getSchema(shapeId) {
    const id = this.normalizeShapeId(shapeId);
    if (!this.schemas.has(id)) {
      throw new Error(`@smithy/core/schema - schema not found for ${id}`);
    }
    return this.schemas.get(id);
  }

  /**
   * Finds the synthetic base ServiceException schema in this registry,
   * or undefined when none was registered.
   */
  getBaseException() {
    for (const id of this.schemas.keys()) {
      if (id.startsWith("smithy.ts.sdk.synthetic.") && id.endsWith("ServiceException")) {
        return this.schemas.get(id);
      }
    }
    return undefined;
  }

  /** Returns the first registered schema matching the predicate. */
  find(predicate) {
    return Array.from(this.schemas.values()).find(predicate);
  }

  /** Removes this registry from the global table and clears its schemas. */
  destroy() {
    TypeRegistry.registries.delete(this.namespace);
    this.schemas.clear();
  }

  /** Qualifies a bare shape name with this registry's namespace if needed. */
  normalizeShapeId(shapeId) {
    return shapeId.includes("#") ? shapeId : this.namespace + "#" + shapeId;
  }

  /** Extracts the namespace component of a (normalized) shape id. */
  getNamespace(shapeId) {
    const [namespace] = this.normalizeShapeId(shapeId).split("#");
    return namespace;
  }
}
|
||||
+6
@@ -0,0 +1,6 @@
|
||||
/**
 * Resolves a schema reference: lazy references are functions and are invoked,
 * anything else is already a schema and is returned unchanged.
 */
export const deref = (schemaRef) =>
  typeof schemaRef === "function" ? schemaRef() : schemaRef;
|
||||
+12
@@ -0,0 +1,12 @@
|
||||
export * from "./deref";
|
||||
export * from "./middleware/getSchemaSerdePlugin";
|
||||
export * from "./schemas/ListSchema";
|
||||
export * from "./schemas/MapSchema";
|
||||
export * from "./schemas/OperationSchema";
|
||||
export * from "./schemas/ErrorSchema";
|
||||
export * from "./schemas/NormalizedSchema";
|
||||
export * from "./schemas/Schema";
|
||||
export * from "./schemas/SimpleSchema";
|
||||
export * from "./schemas/StructureSchema";
|
||||
export * from "./schemas/sentinels";
|
||||
export * from "./TypeRegistry";
|
||||
Generated
Vendored
+23
@@ -0,0 +1,23 @@
|
||||
import { schemaDeserializationMiddleware } from "./schemaDeserializationMiddleware";
|
||||
import { schemaSerializationMiddleware } from "./schemaSerializationMiddleware";
|
||||
/**
 * Registration options for the schema-driven response deserializer middleware.
 * `override: true` replaces any middleware previously registered under the
 * same name on the stack.
 */
export const deserializerMiddlewareOption = {
  name: "deserializerMiddleware",
  step: "deserialize",
  tags: ["DESERIALIZER"],
  override: true,
};
/**
 * Registration options for the schema-driven request serializer middleware.
 */
export const serializerMiddlewareOption = {
  name: "serializerMiddleware",
  step: "serialize",
  tags: ["SERIALIZER"],
  override: true,
};
|
||||
/**
 * Builds a pluggable that installs the schema-based serialization and
 * deserialization middleware on a command stack and wires the client config
 * into the protocol as its serde context.
 */
export function getSchemaSerdePlugin(config) {
  const applyToStack = (commandStack) => {
    commandStack.add(schemaSerializationMiddleware(config), serializerMiddlewareOption);
    commandStack.add(schemaDeserializationMiddleware(config), deserializerMiddlewareOption);
    // The protocol needs the full client config (encoders, decoders, etc.)
    // available when (de)serializing.
    config.protocol.setSerdeContext(config);
  };
  return { applyToStack };
}
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
export {};
|
||||
Generated
Vendored
+60
@@ -0,0 +1,60 @@
|
||||
import { HttpResponse } from "@smithy/protocol-http";
|
||||
import { getSmithyContext } from "@smithy/util-middleware";
|
||||
/**
 * Deserialize-step middleware: runs the request, then parses the HTTP
 * response via the configured protocol. On parse failure it decorates the
 * thrown error with the raw response ($response), a hint message, and
 * best-effort response metadata ($metadata) before rethrowing.
 */
export const schemaDeserializationMiddleware = (config) => (next, context) => async (args) => {
  const { response } = await next(args);
  const { operationSchema } = getSmithyContext(context);
  try {
    const parsed = await config.protocol.deserializeResponse(operationSchema, {
      ...config,
      ...context,
    }, response);
    return {
      response,
      output: parsed,
    };
  }
  catch (error) {
    // Attach the raw response as a hidden (non-enumerable) field for debugging.
    Object.defineProperty(error, "$response", {
      value: response,
    });
    // Only decorate errors that were not already modeled ($metadata present
    // means the protocol produced a proper service exception).
    if (!("$metadata" in error)) {
      const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`;
      try {
        error.message += "\n " + hint;
      }
      catch (e) {
        // error.message may be read-only; fall back to logging the hint.
        if (!context.logger || context.logger?.constructor?.name === "NoOpLogger") {
          console.warn(hint);
        }
        else {
          context.logger?.warn?.(hint);
        }
      }
      // If the body text was captured earlier, restore it on the response
      // so the $response hint above is actually inspectable.
      if (typeof error.$responseBodyText !== "undefined") {
        if (error.$response) {
          error.$response.body = error.$responseBodyText;
        }
      }
      // Best effort: populate $metadata from response headers; never let
      // metadata extraction mask the original error.
      try {
        if (HttpResponse.isInstance(response)) {
          const { headers = {} } = response;
          const headerEntries = Object.entries(headers);
          error.$metadata = {
            httpStatusCode: response.statusCode,
            requestId: findHeader(/^x-[\w-]+-request-?id$/, headerEntries),
            extendedRequestId: findHeader(/^x-[\w-]+-id-2$/, headerEntries),
            cfId: findHeader(/^x-[\w-]+-cf-id$/, headerEntries),
          };
        }
      }
      catch (e) {
      }
    }
    throw error;
  }
};
/**
 * Returns the value of the first header whose name matches `pattern`,
 * or undefined when no header matches. `headers` is an array of
 * [name, value] entries.
 */
const findHeader = (pattern, headers) => {
  return (headers.find(([k]) => {
    return k.match(pattern);
  }) || [void 0, void 1])[1];
};
|
||||
Generated
Vendored
+16
@@ -0,0 +1,16 @@
|
||||
import { getSmithyContext } from "@smithy/util-middleware";
|
||||
/**
 * Serialize-step middleware: converts the command input into an HTTP request
 * using the configured protocol and the operation schema from the context.
 * When an endpoint has already been resolved (endpointV2) and a URL parser is
 * available, a resolver for that endpoint takes precedence over config.endpoint.
 */
export const schemaSerializationMiddleware = (config) => (next, context) => async (args) => {
  const { operationSchema } = getSmithyContext(context);

  // Prefer the already-resolved endpoint (v2) when present and parseable.
  let endpoint = config.endpoint;
  if (context.endpointV2?.url && config.urlParser) {
    endpoint = async () => config.urlParser(context.endpointV2.url);
  }

  const request = await config.protocol.serializeRequest(operationSchema, args.input, {
    ...config,
    ...context,
    endpoint,
  });
  return next({ ...args, request });
};
|
||||
Generated
Vendored
+17
@@ -0,0 +1,17 @@
|
||||
import { TypeRegistry } from "../TypeRegistry";
|
||||
import { StructureSchema } from "./StructureSchema";
|
||||
/**
 * Structure schema variant for modeled errors; additionally carries the
 * error's constructor so deserializers can instantiate the right class.
 */
export class ErrorSchema extends StructureSchema {
  ctor;

  constructor(name, traits, memberNames, memberList, ctor) {
    super(name, traits, memberNames, memberList);
    this.name = name;
    this.traits = traits;
    this.memberNames = memberNames;
    this.memberList = memberList;
    this.ctor = ctor;
  }
}
/**
 * Factory: creates an ErrorSchema qualified as "namespace#name" and
 * registers it in the namespace's TypeRegistry.
 */
export function error(namespace, name, traits = {}, memberNames, memberList, ctor) {
  const qualifiedName = namespace + "#" + name;
  const schema = new ErrorSchema(qualifiedName, traits, memberNames, memberList, ctor);
  TypeRegistry.for(namespace).register(name, schema);
  return schema;
}
|
||||
Generated
Vendored
+15
@@ -0,0 +1,15 @@
|
||||
import { TypeRegistry } from "../TypeRegistry";
|
||||
import { Schema } from "./Schema";
|
||||
/**
 * Schema for list shapes; holds the schema of the list's member values.
 */
export class ListSchema extends Schema {
  valueSchema;

  constructor(name, traits, valueSchema) {
    super(name, traits);
    this.name = name;
    this.traits = traits;
    this.valueSchema = valueSchema;
  }
}
/**
 * Factory: creates a ListSchema qualified as "namespace#name", resolving a
 * lazy (function) value schema eagerly, and registers it in the namespace's
 * TypeRegistry.
 */
export function list(namespace, name, traits = {}, valueSchema) {
  const resolvedValueSchema = typeof valueSchema === "function" ? valueSchema() : valueSchema;
  const schema = new ListSchema(namespace + "#" + name, traits, resolvedValueSchema);
  TypeRegistry.for(namespace).register(name, schema);
  return schema;
}
|
||||
Generated
Vendored
+16
@@ -0,0 +1,16 @@
|
||||
import { TypeRegistry } from "../TypeRegistry";
|
||||
import { Schema } from "./Schema";
|
||||
/**
 * Schema for map shapes; holds the schemas of the map's keys and values.
 */
export class MapSchema extends Schema {
  keySchema;
  valueSchema;

  constructor(name, traits, keySchema, valueSchema) {
    super(name, traits);
    this.name = name;
    this.traits = traits;
    this.keySchema = keySchema;
    this.valueSchema = valueSchema;
  }
}
/**
 * Factory: creates a MapSchema qualified as "namespace#name", resolving a
 * lazy (function) value schema eagerly, and registers it in the namespace's
 * TypeRegistry. (Key schemas are passed through as-is.)
 */
export function map(namespace, name, traits = {}, keySchema, valueSchema) {
  const resolvedValueSchema = typeof valueSchema === "function" ? valueSchema() : valueSchema;
  const schema = new MapSchema(namespace + "#" + name, traits, keySchema, resolvedValueSchema);
  TypeRegistry.for(namespace).register(name, schema);
  return schema;
}
|
||||
Generated
Vendored
+294
@@ -0,0 +1,294 @@
|
||||
import { deref } from "../deref";
|
||||
import { ListSchema } from "./ListSchema";
|
||||
import { MapSchema } from "./MapSchema";
|
||||
import { SCHEMA } from "./sentinels";
|
||||
import { SimpleSchema } from "./SimpleSchema";
|
||||
import { StructureSchema } from "./StructureSchema";
|
||||
/**
 * Uniform view over the various schema representations (numeric sentinels,
 * schema objects, lazy refs, and [schemaRef, memberTraits] member tuples).
 * Provides type predicates, trait access, and member traversal.
 */
export class NormalizedSchema {
  /**
   * @param ref - schema, schema reference, or member tuple [ref, traits].
   * @param memberName - required when ref is a member tuple.
   */
  constructor(ref, memberName) {
    this.ref = ref;
    this.memberName = memberName;
    const traitStack = [];
    let _ref = ref;
    let schema = ref;
    this._isMemberSchema = false;
    // Unwrap nested member tuples, collecting each level's member traits.
    while (Array.isArray(_ref)) {
      traitStack.push(_ref[1]);
      _ref = _ref[0];
      schema = deref(_ref);
      this._isMemberSchema = true;
    }
    if (traitStack.length > 0) {
      // Merge from outermost to innermost so inner (closer) traits win.
      this.memberTraits = {};
      for (let i = traitStack.length - 1; i >= 0; --i) {
        const traitSet = traitStack[i];
        Object.assign(this.memberTraits, NormalizedSchema.translateTraits(traitSet));
      }
    }
    else {
      // 0 is the empty trait-bitfield sentinel.
      this.memberTraits = 0;
    }
    if (schema instanceof NormalizedSchema) {
      // Wrapping an already-normalized schema: adopt its state, combining
      // its member traits with the ones collected above.
      this.name = schema.name;
      this.traits = schema.traits;
      this._isMemberSchema = schema._isMemberSchema;
      this.schema = schema.schema;
      this.memberTraits = Object.assign({}, schema.getMemberTraits(), this.getMemberTraits());
      this.normalizedTraits = void 0;
      this.ref = schema.ref;
      this.memberName = memberName ?? schema.memberName;
      return;
    }
    this.schema = deref(schema);
    if (this.schema && typeof this.schema === "object") {
      this.traits = this.schema?.traits ?? {};
    }
    else {
      // Numeric sentinel schemas carry no own traits.
      this.traits = 0;
    }
    this.name =
      (typeof this.schema === "object" ? this.schema?.name : void 0) ?? this.memberName ?? this.getSchemaName();
    if (this._isMemberSchema && !memberName) {
      throw new Error(`@smithy/core/schema - NormalizedSchema member schema ${this.getName(true)} must initialize with memberName argument.`);
    }
  }
  /** Returns ref unchanged if already normalized, otherwise wraps it. */
  static of(ref, memberName) {
    if (ref instanceof NormalizedSchema) {
      return ref;
    }
    return new NormalizedSchema(ref, memberName);
  }
  /**
   * Converts a trait representation to object form. Numeric indicators are
   * bitfields: bit0=httpLabel, bit1=idempotent, bit2=idempotencyToken,
   * bit3=sensitive, bit4=httpPayload, bit5=httpResponseCode,
   * bit6=httpQueryParams.
   */
  static translateTraits(indicator) {
    if (typeof indicator === "object") {
      return indicator;
    }
    indicator = indicator | 0;
    const traits = {};
    if ((indicator & 1) === 1) {
      traits.httpLabel = 1;
    }
    if (((indicator >> 1) & 1) === 1) {
      traits.idempotent = 1;
    }
    if (((indicator >> 2) & 1) === 1) {
      traits.idempotencyToken = 1;
    }
    if (((indicator >> 3) & 1) === 1) {
      traits.sensitive = 1;
    }
    if (((indicator >> 4) & 1) === 1) {
      traits.httpPayload = 1;
    }
    if (((indicator >> 5) & 1) === 1) {
      traits.httpResponseCode = 1;
    }
    if (((indicator >> 6) & 1) === 1) {
      traits.httpQueryParams = 1;
    }
    return traits;
  }
  /**
   * Creates a member schema with the given name. NOTE: mutates an existing
   * NormalizedSchema in place rather than copying it.
   */
  static memberFrom(memberSchema, memberName) {
    if (memberSchema instanceof NormalizedSchema) {
      memberSchema.memberName = memberName;
      memberSchema._isMemberSchema = true;
      return memberSchema;
    }
    return new NormalizedSchema(memberSchema, memberName);
  }
  /** Fully resolves the underlying schema (collapsing nesting and refs). */
  getSchema() {
    if (this.schema instanceof NormalizedSchema) {
      // Collapse and cache nested normalized schemas.
      return (this.schema = this.schema.getSchema());
    }
    if (this.schema instanceof SimpleSchema) {
      return deref(this.schema.schemaRef);
    }
    return deref(this.schema);
  }
  /**
   * Returns the schema name; strips the "namespace#" prefix unless
   * withNamespace is true. May return undefined when no name is set.
   */
  getName(withNamespace = false) {
    if (!withNamespace) {
      if (this.name && this.name.includes("#")) {
        return this.name.split("#")[1];
      }
    }
    // Normalize empty string to undefined.
    return this.name || undefined;
  }
  /** @throws Error when called on a non-member schema. */
  getMemberName() {
    if (!this.isMemberSchema()) {
      throw new Error(`@smithy/core/schema - cannot get member name on non-member schema: ${this.getName(true)}`);
    }
    return this.memberName;
  }
  isMemberSchema() {
    return this._isMemberSchema;
  }
  isUnitSchema() {
    return this.getSchema() === "unit";
  }
  /** Numeric sentinels in [LIST_MODIFIER, MAP_MODIFIER) are lists. */
  isListSchema() {
    const inner = this.getSchema();
    if (typeof inner === "number") {
      return inner >= SCHEMA.LIST_MODIFIER && inner < SCHEMA.MAP_MODIFIER;
    }
    return inner instanceof ListSchema;
  }
  /** Numeric sentinels in [MAP_MODIFIER, 255] are maps. */
  isMapSchema() {
    const inner = this.getSchema();
    if (typeof inner === "number") {
      return inner >= SCHEMA.MAP_MODIFIER && inner <= 255;
    }
    return inner instanceof MapSchema;
  }
  isDocumentSchema() {
    return this.getSchema() === SCHEMA.DOCUMENT;
  }
  /** Structures: StructureSchema instances or any object with a `members` map. */
  isStructSchema() {
    const inner = this.getSchema();
    return (inner !== null && typeof inner === "object" && "members" in inner) || inner instanceof StructureSchema;
  }
  isBlobSchema() {
    return this.getSchema() === SCHEMA.BLOB || this.getSchema() === SCHEMA.STREAMING_BLOB;
  }
  /** True for the default timestamp sentinel and the three explicit formats. */
  isTimestampSchema() {
    const schema = this.getSchema();
    return typeof schema === "number" && schema >= SCHEMA.TIMESTAMP_DEFAULT && schema <= SCHEMA.TIMESTAMP_EPOCH_SECONDS;
  }
  isStringSchema() {
    return this.getSchema() === SCHEMA.STRING;
  }
  isBooleanSchema() {
    return this.getSchema() === SCHEMA.BOOLEAN;
  }
  isNumericSchema() {
    return this.getSchema() === SCHEMA.NUMERIC;
  }
  isBigIntegerSchema() {
    return this.getSchema() === SCHEMA.BIG_INTEGER;
  }
  isBigDecimalSchema() {
    return this.getSchema() === SCHEMA.BIG_DECIMAL;
  }
  /** Streaming if the streaming trait is set or the blob is the streaming sentinel. */
  isStreaming() {
    const streaming = !!this.getMergedTraits().streaming;
    if (streaming) {
      return true;
    }
    return this.getSchema() === SCHEMA.STREAMING_BLOB;
  }
  /** Own traits overlaid by member traits; result is cached. */
  getMergedTraits() {
    if (this.normalizedTraits) {
      return this.normalizedTraits;
    }
    this.normalizedTraits = {
      ...this.getOwnTraits(),
      ...this.getMemberTraits(),
    };
    return this.normalizedTraits;
  }
  getMemberTraits() {
    return NormalizedSchema.translateTraits(this.memberTraits);
  }
  getOwnTraits() {
    return NormalizedSchema.translateTraits(this.traits);
  }
  /**
   * Returns the key schema of a map (documents yield a document key schema).
   * For numeric map sentinels, the low 6 bits (63 & schema) encode the key type.
   * @throws Error on non-map, non-document schemas.
   */
  getKeySchema() {
    if (this.isDocumentSchema()) {
      return NormalizedSchema.memberFrom([SCHEMA.DOCUMENT, 0], "key");
    }
    if (!this.isMapSchema()) {
      throw new Error(`@smithy/core/schema - cannot get key schema for non-map schema: ${this.getName(true)}`);
    }
    const schema = this.getSchema();
    if (typeof schema === "number") {
      return NormalizedSchema.memberFrom([63 & schema, 0], "key");
    }
    return NormalizedSchema.memberFrom([schema.keySchema, 0], "key");
  }
  /**
   * Returns the value schema of a map ("value") or list ("member");
   * documents yield a document value schema.
   * @throws Error on schemas with no value member (incl. structures).
   */
  getValueSchema() {
    const schema = this.getSchema();
    if (typeof schema === "number") {
      if (this.isMapSchema()) {
        // Low 6 bits encode the value type of numeric collection sentinels.
        return NormalizedSchema.memberFrom([63 & schema, 0], "value");
      }
      else if (this.isListSchema()) {
        return NormalizedSchema.memberFrom([63 & schema, 0], "member");
      }
    }
    if (schema && typeof schema === "object") {
      if (this.isStructSchema()) {
        throw new Error(`cannot call getValueSchema() with StructureSchema ${this.getName(true)}`);
      }
      const collection = schema;
      if ("valueSchema" in collection) {
        if (this.isMapSchema()) {
          return NormalizedSchema.memberFrom([collection.valueSchema, 0], "value");
        }
        else if (this.isListSchema()) {
          return NormalizedSchema.memberFrom([collection.valueSchema, 0], "member");
        }
      }
    }
    if (this.isDocumentSchema()) {
      return NormalizedSchema.memberFrom([SCHEMA.DOCUMENT, 0], "value");
    }
    throw new Error(`@smithy/core/schema - the schema ${this.getName(true)} does not have a value member.`);
  }
  /**
   * Returns the named member's schema on a structure (or a document member
   * schema on documents).
   * @throws Error when the member or members are absent.
   */
  getMemberSchema(member) {
    if (this.isStructSchema()) {
      const struct = this.getSchema();
      if (!(member in struct.members)) {
        throw new Error(`@smithy/core/schema - the schema ${this.getName(true)} does not have a member with name=${member}.`);
      }
      return NormalizedSchema.memberFrom(struct.members[member], member);
    }
    if (this.isDocumentSchema()) {
      return NormalizedSchema.memberFrom([SCHEMA.DOCUMENT, 0], member);
    }
    throw new Error(`@smithy/core/schema - the schema ${this.getName(true)} does not have members.`);
  }
  /**
   * Returns a name->NormalizedSchema map of all members, or {} when the
   * underlying schema has none.
   */
  getMemberSchemas() {
    const { schema } = this;
    const struct = schema;
    if (!struct || typeof struct !== "object") {
      return {};
    }
    if ("members" in struct) {
      const buffer = {};
      for (const member of struct.memberNames) {
        buffer[member] = this.getMemberSchema(member);
      }
      return buffer;
    }
    return {};
  }
  /**
   * Iterates [memberName, memberSchema] pairs of a structure.
   * Unit schemas yield nothing.
   * @throws Error on non-struct schemas.
   */
  *structIterator() {
    if (this.isUnitSchema()) {
      return;
    }
    if (!this.isStructSchema()) {
      throw new Error("@smithy/core/schema - cannot acquire structIterator on non-struct schema.");
    }
    const struct = this.getSchema();
    for (let i = 0; i < struct.memberNames.length; ++i) {
      yield [struct.memberNames[i], NormalizedSchema.memberFrom([struct.memberList[i], 0], struct.memberNames[i])];
    }
  }
  /**
   * Derives a display name for numeric sentinel schemas: low 6 bits select
   * the base type, bits 6/7 (192 mask) select the List/Map container form.
   * Returns "Unknown" for anything else.
   */
  getSchemaName() {
    const schema = this.getSchema();
    if (typeof schema === "number") {
      const _schema = 63 & schema;
      const container = 192 & schema;
      const type = Object.entries(SCHEMA).find(([, value]) => {
        return value === _schema;
      })?.[0] ?? "Unknown";
      switch (container) {
        case SCHEMA.MAP_MODIFIER:
          return `${type}Map`;
        case SCHEMA.LIST_MODIFIER:
          return `${type}List`;
        case 0:
          return type;
      }
    }
    return "Unknown";
  }
}
|
||||
Generated
Vendored
+16
@@ -0,0 +1,16 @@
|
||||
import { TypeRegistry } from "../TypeRegistry";
|
||||
import { Schema } from "./Schema";
|
||||
/**
 * Schema for operation shapes; holds the input and output schema references.
 */
export class OperationSchema extends Schema {
  input;
  output;

  constructor(name, traits, input, output) {
    super(name, traits);
    this.name = name;
    this.traits = traits;
    this.input = input;
    this.output = output;
  }
}
/**
 * Factory: creates an OperationSchema qualified as "namespace#name" and
 * registers it in the namespace's TypeRegistry.
 */
export function op(namespace, name, traits = {}, input, output) {
  const operation = new OperationSchema(namespace + "#" + name, traits, input, output);
  TypeRegistry.for(namespace).register(name, operation);
  return operation;
}
|
||||
Generated
Vendored
+6
@@ -0,0 +1,6 @@
|
||||
/**
 * Base class for all schema objects: a qualified name plus its traits
 * (either a trait object or a numeric trait bitfield).
 */
export class Schema {
  name;
  traits;

  constructor(name, traits) {
    this.name = name;
    this.traits = traits;
  }
}
|
||||
Generated
Vendored
+15
@@ -0,0 +1,15 @@
|
||||
import { TypeRegistry } from "../TypeRegistry";
|
||||
import { Schema } from "./Schema";
|
||||
/**
 * Schema wrapper for simple (scalar) types: pairs a reference to the
 * underlying type sentinel/schema with traits.
 */
export class SimpleSchema extends Schema {
  schemaRef;

  constructor(name, schemaRef, traits) {
    super(name, traits);
    this.name = name;
    this.schemaRef = schemaRef;
    this.traits = traits;
  }
}
/**
 * Factory: creates a SimpleSchema qualified as "namespace#name" and
 * registers it in the namespace's TypeRegistry.
 */
export function sim(namespace, name, schemaRef, traits) {
  const qualifiedName = namespace + "#" + name;
  const schema = new SimpleSchema(qualifiedName, schemaRef, traits);
  TypeRegistry.for(namespace).register(name, schema);
  return schema;
}
|
||||
Generated
Vendored
+22
@@ -0,0 +1,22 @@
|
||||
import { TypeRegistry } from "../TypeRegistry";
|
||||
import { Schema } from "./Schema";
|
||||
/**
 * Schema for structure shapes. Keeps the parallel memberNames/memberList
 * arrays and also builds a `members` lookup where every entry is normalized
 * to the [schemaRef, traits] tuple form.
 */
export class StructureSchema extends Schema {
  memberNames;
  memberList;
  members;

  constructor(name, traits, memberNames, memberList) {
    super(name, traits);
    this.name = name;
    this.traits = traits;
    this.memberNames = memberNames;
    this.memberList = memberList;
    this.members = {};
    memberNames.forEach((memberName, index) => {
      const entry = memberList[index];
      // Bare schema refs get an empty (0) trait bitfield.
      this.members[memberName] = Array.isArray(entry) ? entry : [entry, 0];
    });
  }
}
/**
 * Factory: creates a StructureSchema qualified as "namespace#name" and
 * registers it in the namespace's TypeRegistry.
 */
export function struct(namespace, name, traits, memberNames, memberList) {
  const schema = new StructureSchema(namespace + "#" + name, traits, memberNames, memberList);
  TypeRegistry.for(namespace).register(name, schema);
  return schema;
}
|
||||
Generated
Vendored
+16
@@ -0,0 +1,16 @@
|
||||
/**
 * Numeric sentinel values for schema types.
 * Layout: the low 6 bits (0-63) select the base type; LIST_MODIFIER (bit 6)
 * and MAP_MODIFIER (bit 7) mark a value as a list/map of that base type.
 */
export const SCHEMA = {
  BLOB: 21,
  STREAMING_BLOB: 42,
  BOOLEAN: 2,
  STRING: 0,
  NUMERIC: 1,
  BIG_INTEGER: 17,
  BIG_DECIMAL: 19,
  DOCUMENT: 15,
  // Timestamp formats occupy the contiguous range 4..7 so range checks work.
  TIMESTAMP_DEFAULT: 4,
  TIMESTAMP_DATE_TIME: 5,
  TIMESTAMP_HTTP_DATE: 6,
  TIMESTAMP_EPOCH_SECONDS: 7,
  LIST_MODIFIER: 64,
  MAP_MODIFIER: 128,
};
|
||||
Generated
Vendored
+53
@@ -0,0 +1,53 @@
|
||||
import { NormalizedSchema } from "@smithy/core/schema";
|
||||
/**
 * Deep-copies a document-like value, applying `transform(value, schema)` to
 * every node (leaves and containers) on the way out. Recursion is directed by
 * the schema: map/document values use the value schema, structure members use
 * each member's schema.
 */
export const copyDocumentWithTransform = (source, schemaRef, transform = (_) => _) => {
  const ns = NormalizedSchema.of(schemaRef);
  switch (typeof source) {
    case "undefined":
    case "boolean":
    case "number":
    case "string":
    case "bigint":
    case "symbol":
      // Primitives are immutable; transform directly.
      return transform(source, ns);
    case "function":
    case "object":
      if (source === null) {
        return transform(null, ns);
      }
      if (Array.isArray(source)) {
        const newArray = new Array(source.length);
        let i = 0;
        for (const item of source) {
          newArray[i++] = copyDocumentWithTransform(item, ns.getValueSchema(), transform);
        }
        return transform(newArray, ns);
      }
      // Byte arrays ("byteLength" duck-check) are copied into a fresh Uint8Array.
      if ("byteLength" in source) {
        const newBytes = new Uint8Array(source.byteLength);
        newBytes.set(source, 0);
        return transform(newBytes, ns);
      }
      // Dates are passed through without copying.
      if (source instanceof Date) {
        return transform(source, ns);
      }
      const newObject = {};
      if (ns.isMapSchema()) {
        for (const key of Object.keys(source)) {
          newObject[key] = copyDocumentWithTransform(source[key], ns.getValueSchema(), transform);
        }
      }
      else if (ns.isStructSchema()) {
        // Iterate declared members so each value is paired with its member schema.
        for (const [key, memberSchema] of ns.structIterator()) {
          newObject[key] = copyDocumentWithTransform(source[key], memberSchema, transform);
        }
      }
      else if (ns.isDocumentSchema()) {
        for (const key of Object.keys(source)) {
          newObject[key] = copyDocumentWithTransform(source[key], ns.getValueSchema(), transform);
        }
      }
      // NOTE(review): for objects matching none of the three schema kinds,
      // newObject stays empty — the source's keys are intentionally dropped.
      return transform(newObject, ns);
    default:
      return transform(source, ns);
  }
};
|
||||
Generated
Vendored
+190
@@ -0,0 +1,190 @@
|
||||
import { strictParseByte, strictParseDouble, strictParseFloat32, strictParseShort } from "./parse-utils";
|
||||
const DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"];
const MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"];
/**
 * Formats a Date as an IMF-fixdate / RFC 7231 HTTP-date string,
 * e.g. "Sun, 06 Nov 1994 08:49:37 GMT". Uses the date's UTC fields.
 *
 * @param date - the Date to format.
 * @returns the HTTP-date representation of `date`.
 */
export function dateToUtcString(date) {
  // Zero-pad day/hour/minute/second to two digits.
  const pad2 = (value) => String(value).padStart(2, "0");

  const dayName = DAYS[date.getUTCDay()];
  const monthName = MONTHS[date.getUTCMonth()];
  const day = pad2(date.getUTCDate());
  const time = `${pad2(date.getUTCHours())}:${pad2(date.getUTCMinutes())}:${pad2(date.getUTCSeconds())}`;

  return `${dayName}, ${day} ${monthName} ${date.getUTCFullYear()} ${time} GMT`;
}
|
||||
// RFC 3339 date-time restricted to the UTC ("Z") designator; captures
// year, month, day, hours, minutes, seconds, optional fractional seconds.
const RFC3339 = /^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/;

/**
 * Parses an RFC 3339 date-time string (UTC only) into a Date.
 *
 * @param value - the string to parse; null/undefined pass through as undefined.
 * @returns the parsed Date, or undefined for nullish input.
 * @throws TypeError when value is not a string or does not match RFC 3339.
 */
export const parseRfc3339DateTime = (value) => {
  if (value == null) {
    return undefined;
  }
  if (typeof value !== "string") {
    throw new TypeError("RFC-3339 date-times must be expressed as strings");
  }
  const match = RFC3339.exec(value);
  if (!match) {
    throw new TypeError("Invalid RFC-3339 date-time value");
  }
  const [, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match;
  return buildDate(
    strictParseShort(stripLeadingZeroes(yearStr)),
    parseDateValue(monthStr, "month", 1, 12),
    parseDateValue(dayStr, "day", 1, 31),
    { hours, minutes, seconds, fractionalMilliseconds }
  );
};
|
||||
// RFC 3339 date-time accepting either "Z" or a numeric "+hh:mm"/"-hh:mm" offset.
const RFC3339_WITH_OFFSET = /^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/;

/**
 * Parses an RFC 3339 date-time string with an optional UTC offset into a Date,
 * shifting the result to UTC when a non-"Z" offset is present.
 *
 * @param value - the string to parse; null/undefined pass through as undefined.
 * @returns the parsed Date, or undefined for nullish input.
 * @throws TypeError when value is not a string or does not match the grammar.
 */
export const parseRfc3339DateTimeWithOffset = (value) => {
  if (value == null) {
    return undefined;
  }
  if (typeof value !== "string") {
    throw new TypeError("RFC-3339 date-times must be expressed as strings");
  }
  const match = RFC3339_WITH_OFFSET.exec(value);
  if (!match) {
    throw new TypeError("Invalid RFC-3339 date-time value");
  }
  const [, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match;
  const date = buildDate(
    strictParseShort(stripLeadingZeroes(yearStr)),
    parseDateValue(monthStr, "month", 1, 12),
    parseDateValue(dayStr, "day", 1, 31),
    { hours, minutes, seconds, fractionalMilliseconds }
  );
  // Numeric offsets are relative to UTC; subtract them to normalize the Date.
  if (offsetStr.toUpperCase() !== "Z") {
    date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr));
  }
  return date;
};
|
||||
// RFC 7231 IMF-fixdate, e.g. "Tue, 29 Apr 2014 18:30:38 GMT".
const IMF_FIXDATE = /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/;
// Obsolete RFC 850 form with a two-digit year, e.g. "Tuesday, 29-Apr-14 18:30:38 GMT".
const RFC_850_DATE = /^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/;
// Obsolete asctime form, e.g. "Tue Apr 29 18:30:38 2014" (single-digit day is space-padded).
const ASC_TIME = /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? (\d{4})$/;
|
||||
/**
 * Parses an RFC 7231 (HTTP) date-time in any of its three allowed formats:
 * IMF-fixdate, the obsolete RFC 850 form, or the obsolete asctime form.
 * Returns undefined for null/undefined input.
 * @throws TypeError when the input is not a string or matches no format.
 */
export const parseRfc7231DateTime = (value) => {
  if (value === null || value === undefined) {
    return undefined;
  }
  if (typeof value !== "string") {
    throw new TypeError("RFC-7231 date-times must be expressed as strings");
  }
  let match = IMF_FIXDATE.exec(value);
  if (match) {
    const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match;
    return buildDate(strictParseShort(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds });
  }
  match = RFC_850_DATE.exec(value);
  if (match) {
    const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match;
    // RFC 850 years are two digits; adjustRfc850Year resolves the century.
    return adjustRfc850Year(buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), {
      hours,
      minutes,
      seconds,
      fractionalMilliseconds,
    }));
  }
  match = ASC_TIME.exec(value);
  if (match) {
    const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match;
    // asctime pads single-digit days with a leading space; trimStart() (the
    // standard name for the deprecated trimLeft alias) removes it before
    // range validation.
    return buildDate(strictParseShort(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr.trimStart(), "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds });
  }
  throw new TypeError("Invalid RFC-7231 date-time value");
};
|
||||
/**
 * Converts an epoch-seconds timestamp (a number, a numeric string, or a tagged
 * { tag: 1, value } wrapper) into a Date. Returns undefined for null/undefined.
 * @throws TypeError for unsupported types or non-finite values.
 */
export const parseEpochTimestamp = (value) => {
  if (value == null) {
    return undefined;
  }
  let seconds;
  if (typeof value === "number") {
    seconds = value;
  } else if (typeof value === "string") {
    seconds = strictParseDouble(value);
  } else if (typeof value === "object" && value.tag === 1) {
    seconds = value.value;
  } else {
    throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation");
  }
  if (Number.isNaN(seconds) || seconds === Infinity || seconds === -Infinity) {
    throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics");
  }
  // Epoch seconds -> integral epoch milliseconds.
  return new Date(Math.round(seconds * 1000));
};
|
||||
/**
 * Builds a UTC Date from numeric year/month/day plus string time components.
 * `month` is 1-based; time fields are range-validated (seconds allow 60).
 */
const buildDate = (year, month, day, time) => {
  const monthIndex = month - 1;
  validateDayOfMonth(year, monthIndex, day);
  const hour = parseDateValue(time.hours, "hour", 0, 23);
  const minute = parseDateValue(time.minutes, "minute", 0, 59);
  const second = parseDateValue(time.seconds, "seconds", 0, 60);
  return new Date(Date.UTC(year, monthIndex, day, hour, minute, second, parseMilliseconds(time.fractionalMilliseconds)));
};
|
||||
/**
 * Resolves a two-digit year to a full year: the occurrence in the current
 * century, or the next century if that occurrence is already in the past.
 * (adjustRfc850Year later pulls far-future results back where required.)
 */
const parseTwoDigitYear = (value) => {
  const currentYear = new Date().getUTCFullYear();
  const centuryStart = Math.floor(currentYear / 100) * 100;
  const candidate = centuryStart + strictParseShort(stripLeadingZeroes(value));
  return candidate < currentYear ? candidate + 100 : candidate;
};
|
||||
// Milliseconds in roughly fifty years (365-day years, leap days ignored).
const FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1000;

/**
 * Disambiguates RFC 850 two-digit years: a date more than ~50 years in the
 * future is shifted back one century; anything else is returned unchanged.
 */
const adjustRfc850Year = (input) => {
  const tooFarAhead = input.getTime() - new Date().getTime() > FIFTY_YEARS_IN_MILLIS;
  if (!tooFarAhead) {
    return input;
  }
  return new Date(Date.UTC(
    input.getUTCFullYear() - 100,
    input.getUTCMonth(),
    input.getUTCDate(),
    input.getUTCHours(),
    input.getUTCMinutes(),
    input.getUTCSeconds(),
    input.getUTCMilliseconds()
  ));
};
|
||||
/**
 * Maps a three-letter month name ("Jan".."Dec", per MONTHS) to its 1-based
 * month number.
 * @throws TypeError for unrecognized names.
 */
const parseMonthByShortName = (value) => {
  const index = MONTHS.indexOf(value);
  if (index === -1) {
    throw new TypeError(`Invalid month: ${value}`);
  }
  return index + 1;
};
|
||||
// Day counts for Jan..Dec in a non-leap year (month is 0-indexed here).
const DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];

/**
 * Validates that `day` exists in the 0-indexed `month` of `year`, allowing
 * February 29th in leap years.
 * @throws TypeError when the day exceeds the month's length.
 */
const validateDayOfMonth = (year, month, day) => {
  const isLeapFebruary = month === 1 && isLeapYear(year);
  const maxDays = isLeapFebruary ? 29 : DAYS_IN_MONTH[month];
  if (day > maxDays) {
    throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`);
  }
};
|
||||
/** Gregorian leap-year rule: divisible by 4, except centuries not divisible by 400. */
const isLeapYear = (year) => {
  if (year % 4 !== 0) {
    return false;
  }
  return year % 100 !== 0 || year % 400 === 0;
};
|
||||
/**
 * Parses a (possibly zero-padded) date component string and range-checks it.
 * @throws TypeError when the parsed value lies outside [lower, upper].
 */
const parseDateValue = (value, type, lower, upper) => {
  const parsed = strictParseByte(stripLeadingZeroes(value));
  if (parsed >= lower && parsed <= upper) {
    return parsed;
  }
  throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`);
};
|
||||
/**
 * Converts the fractional-seconds digits of a timestamp (e.g. "123" captured
 * from ".123") into milliseconds. Absent input yields 0.
 */
const parseMilliseconds = (value) => {
  if (value == null) {
    return 0;
  }
  return strictParseFloat32("0." + value) * 1000;
};
|
||||
/**
 * Converts a "+HH:MM" / "-HH:MM" timezone offset string to signed milliseconds.
 * @throws TypeError when the leading sign is neither "+" nor "-".
 */
const parseOffsetToMilliseconds = (value) => {
  const sign = value[0];
  let multiplier;
  switch (sign) {
    case "+":
      multiplier = 1;
      break;
    case "-":
      multiplier = -1;
      break;
    default:
      throw new TypeError(`Offset direction, ${sign}, must be "+" or "-"`);
  }
  const hours = Number(value.substring(1, 3));
  const minutes = Number(value.substring(4, 6));
  return multiplier * (hours * 60 + minutes) * 60 * 1000;
};
|
||||
/**
 * Removes leading "0" characters, always keeping at least one character
 * (so "000" becomes "0"). Unpadded input is returned unchanged.
 */
const stripLeadingZeroes = (value) => {
  let start = 0;
  const lastIndex = value.length - 1;
  while (start < lastIndex && value.charAt(start) === "0") {
    start += 1;
  }
  return start === 0 ? value : value.slice(start);
};
|
||||
+8
@@ -0,0 +1,8 @@
|
||||
// Barrel file: re-exports every serde utility submodule as the package surface.
export * from "./copyDocumentWithTransform";
export * from "./date-utils";
export * from "./lazy-json";
export * from "./parse-utils";
export * from "./quote-header";
export * from "./split-every";
export * from "./split-header";
export * from "./value/NumericValue";
|
||||
Generated
Vendored
+24
@@ -0,0 +1,24 @@
|
||||
/**
 * Wraps a value's string form in a String object augmented with helpers, so
 * raw JSON text can be carried verbatim and parsed only on demand.
 */
export const LazyJsonString = function LazyJsonString(val) {
  const text = String(val);
  return Object.assign(new String(val), {
    // Parse the underlying JSON text lazily.
    deserializeJSON() {
      return JSON.parse(text);
    },
    toString() {
      return text;
    },
    // JSON.stringify serializes the raw text rather than the String wrapper.
    toJSON() {
      return text;
    },
  });
};

/**
 * Coerces an arbitrary value into a LazyJsonString: existing instances (or
 * anything exposing deserializeJSON) pass through, string-like values are
 * wrapped directly, and everything else is JSON-stringified first.
 */
LazyJsonString.from = (object) => {
  if (object && typeof object === "object" && (object instanceof LazyJsonString || "deserializeJSON" in object)) {
    return object;
  }
  if (typeof object === "string" || Object.getPrototypeOf(object) === String.prototype) {
    return LazyJsonString(String(object));
  }
  return LazyJsonString(JSON.stringify(object));
};

// Alias preserved for callers using the earlier API name.
LazyJsonString.fromObject = LazyJsonString.from;
|
||||
Generated
Vendored
+230
@@ -0,0 +1,230 @@
|
||||
/**
 * Strictly parses the exact strings "true"/"false" into booleans.
 * @throws Error for any other input.
 */
export const parseBoolean = (value) => {
  if (value === "true") {
    return true;
  }
  if (value === "false") {
    return false;
  }
  throw new Error(`Unable to parse boolean value "${value}"`);
};
|
||||
/**
 * Asserts a value is (or safely coerces to) a boolean. Accepts real booleans,
 * the numbers 0/1, and the strings "true"/"false" (case-insensitive); the
 * latter two are accepted with a logged warning. Returns undefined for
 * null/undefined.
 * @throws TypeError for anything else.
 */
export const expectBoolean = (value) => {
  if (value == null) {
    return undefined;
  }
  if (typeof value === "boolean") {
    return value;
  }
  if (typeof value === "number" && (value === 0 || value === 1)) {
    // Tolerate 0/1 but surface the type mismatch.
    logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`));
    return value === 1;
  }
  if (typeof value === "string") {
    const lowered = value.toLowerCase();
    if (lowered === "true" || lowered === "false") {
      logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`));
      return lowered === "true";
    }
  }
  throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`);
};
|
||||
/**
 * Asserts a value is a number, tolerating numeric strings (a warning is
 * logged when the string is not the canonical representation of its parse).
 * Returns undefined for null/undefined.
 * @throws TypeError for non-numeric input.
 */
export const expectNumber = (value) => {
  if (value == null) {
    return undefined;
  }
  if (typeof value === "number") {
    return value;
  }
  if (typeof value === "string") {
    const parsed = parseFloat(value);
    if (!Number.isNaN(parsed)) {
      if (String(parsed) !== String(value)) {
        // e.g. "1.0" or "1abc": parseable but lossy/non-canonical.
        logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`));
      }
      return parsed;
    }
  }
  throw new TypeError(`Expected number, got ${typeof value}: ${value}`);
};
|
||||
// Largest finite IEEE-754 single-precision magnitude, rounded up to an integer.
const MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23));

/**
 * Like expectNumber, but additionally rejects finite magnitudes too large for
 * a 32-bit float. NaN and ±Infinity pass through unchanged.
 */
export const expectFloat32 = (value) => {
  const asNumber = expectNumber(value);
  const isFinite = asNumber !== undefined && !Number.isNaN(asNumber) && asNumber !== Infinity && asNumber !== -Infinity;
  if (isFinite && Math.abs(asNumber) > MAX_FLOAT) {
    throw new TypeError(`Expected 32-bit float, got ${value}`);
  }
  return asNumber;
};
|
||||
/**
 * Asserts a value is an integer (within double precision), returning it
 * unchanged. Returns undefined for null/undefined.
 * @throws TypeError for non-integers and non-numbers.
 */
export const expectLong = (value) => {
  if (value == null) {
    return undefined;
  }
  // Number.isInteger is already false for NaN and non-numbers.
  if (Number.isInteger(value)) {
    return value;
  }
  throw new TypeError(`Expected integer, got ${typeof value}: ${value}`);
};
|
||||
// expectInt is an alias of expectLong (any integer accepted).
export const expectInt = expectLong;
// Size-bounded integer assertions, all delegating to expectSizedInt.
export const expectInt32 = (value) => expectSizedInt(value, 32);
export const expectShort = (value) => expectSizedInt(value, 16);
export const expectByte = (value) => expectSizedInt(value, 8);

/**
 * Asserts `value` is an integer that fits in `size` bits (two's complement);
 * undefined passes through unchanged.
 * @throws TypeError when the value does not round-trip through the sized cast.
 */
const expectSizedInt = (value, size) => {
  const asLong = expectLong(value);
  if (asLong !== undefined && castInt(asLong, size) !== asLong) {
    throw new TypeError(`Expected ${size}-bit integer, got ${value}`);
  }
  return asLong;
};

// Truncates an integer to the given bit width via typed-array wrapping.
const castInt = (value, size) => {
  switch (size) {
    case 32:
      return Int32Array.of(value)[0];
    case 16:
      return Int16Array.of(value)[0];
    case 8:
      return Int8Array.of(value)[0];
  }
};
|
||||
/**
 * Asserts a value is neither null nor undefined, returning it unchanged.
 * @param location - optional field name included in the error message.
 * @throws TypeError when the value is nullish.
 */
export const expectNonNull = (value, location) => {
  if (value != null) {
    return value;
  }
  if (location) {
    throw new TypeError(`Expected a non-null value for ${location}`);
  }
  throw new TypeError("Expected a non-null value");
};
|
||||
/**
 * Asserts a value is a non-array object. Returns undefined for null/undefined.
 * @throws TypeError otherwise, naming the received type ("array" for arrays).
 */
export const expectObject = (value) => {
  if (value == null) {
    return undefined;
  }
  if (typeof value === "object" && !Array.isArray(value)) {
    return value;
  }
  const receivedType = Array.isArray(value) ? "array" : typeof value;
  throw new TypeError(`Expected object, got ${receivedType}: ${value}`);
};
|
||||
/**
 * Asserts a value is a string. Booleans, numbers, and bigints are coerced to
 * strings with a logged warning; everything else throws.
 * Returns undefined for null/undefined.
 */
export const expectString = (value) => {
  if (value == null) {
    return undefined;
  }
  if (typeof value === "string") {
    return value;
  }
  const coercible = ["boolean", "number", "bigint"].includes(typeof value);
  if (coercible) {
    logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`));
    return String(value);
  }
  throw new TypeError(`Expected string, got ${typeof value}: ${value}`);
};
|
||||
/**
 * Asserts a value is a union-shaped object with exactly one non-null member.
 * Returns undefined for null/undefined.
 * @throws TypeError when zero members, or more than one member, are set.
 */
export const expectUnion = (value) => {
  if (value == null) {
    return undefined;
  }
  const asObject = expectObject(value);
  const setKeys = [];
  for (const [key, member] of Object.entries(asObject)) {
    if (member != null) {
      setKeys.push(key);
    }
  }
  if (setKeys.length === 0) {
    throw new TypeError(`Unions must have exactly one non-null member. None were found.`);
  }
  if (setKeys.length > 1) {
    throw new TypeError(`Unions must have exactly one non-null member. Keys ${setKeys} were not null.`);
  }
  return asObject;
};
|
||||
/**
 * Parses a string as a double using the strict number grammar (parseNumber),
 * or validates an already-numeric value via expectNumber.
 */
export const strictParseDouble = (value) => {
  return typeof value === "string" ? expectNumber(parseNumber(value)) : expectNumber(value);
};

// In JavaScript, float and double share the same representation.
export const strictParseFloat = strictParseDouble;
|
||||
/**
 * Parses a string using the strict number grammar, or validates an
 * already-numeric value, enforcing 32-bit float range via expectFloat32.
 */
export const strictParseFloat32 = (value) => {
  return typeof value === "string" ? expectFloat32(parseNumber(value)) : expectFloat32(value);
};
|
||||
// Canonical number grammar (no leading zeros, optional fraction/exponent),
// plus the literals Infinity/-Infinity/NaN.
const NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g;

/**
 * Parses a string to a number, rejecting input that the grammar does not
 * consume entirely (e.g. "1abc" or "01").
 * @throws TypeError on grammar mismatch.
 */
const parseNumber = (value) => {
  const matched = value.match(NUMBER_REGEX);
  const consumedWholeInput = matched !== null && matched[0].length === value.length;
  if (!consumedWholeInput) {
    throw new TypeError(`Expected real number, got implicit NaN`);
  }
  return parseFloat(value);
};
|
||||
/**
 * Parses string input only when it is one of the special float literals
 * ("NaN", "Infinity", "-Infinity"); any other string throws. Non-string
 * input is validated as a plain number.
 */
export const limitedParseDouble = (value) => {
  if (typeof value === "string") {
    return parseFloatString(value);
  }
  return expectNumber(value);
};

// Aliases of limitedParseDouble.
export const handleFloat = limitedParseDouble;
export const limitedParseFloat = limitedParseDouble;

/**
 * Like limitedParseDouble, but validates non-string input as a 32-bit float.
 */
export const limitedParseFloat32 = (value) => {
  if (typeof value === "string") {
    return parseFloatString(value);
  }
  return expectFloat32(value);
};
|
||||
/**
 * Maps the special float literals to their numeric values.
 * @throws Error for any other string (including ordinary numerics).
 */
const parseFloatString = (value) => {
  switch (value) {
    case "NaN":
      return NaN;
    case "Infinity":
      return Infinity;
    case "-Infinity":
      return -Infinity;
  }
  throw new Error(`Unable to parse float value: ${value}`);
};
|
||||
/**
 * Strictly parses a string through the number grammar and asserts an integer,
 * or validates an already-numeric integer.
 */
export const strictParseLong = (value) => {
  return typeof value === "string" ? expectLong(parseNumber(value)) : expectLong(value);
};

// Alias of strictParseLong.
export const strictParseInt = strictParseLong;

/** 32-bit variant of strictParseLong. */
export const strictParseInt32 = (value) => {
  return typeof value === "string" ? expectInt32(parseNumber(value)) : expectInt32(value);
};

/** 16-bit variant of strictParseLong. */
export const strictParseShort = (value) => {
  return typeof value === "string" ? expectShort(parseNumber(value)) : expectShort(value);
};

/** 8-bit variant of strictParseLong. */
export const strictParseByte = (value) => {
  return typeof value === "string" ? expectByte(parseNumber(value)) : expectByte(value);
};
|
||||
/**
 * Builds a warning message that carries up to the top five stack frames,
 * with this helper's own frame filtered out so warnings point at the caller.
 * (The function's name must stay "stackTraceWarning" — it filters itself
 * out of the trace by that literal.)
 */
const stackTraceWarning = (message) => {
  const trace = String(new TypeError(message).stack || message);
  const topFrames = trace.split("\n").slice(0, 5);
  return topFrames.filter((s) => !s.includes("stackTraceWarning")).join("\n");
};

/**
 * Minimal logger used by the validation helpers; reassign `warn` to redirect
 * validation warnings away from the console.
 */
export const logger = {
  warn: console.warn,
};
|
||||
Generated
Vendored
+6
@@ -0,0 +1,6 @@
|
||||
/**
 * Quotes an HTTP header list element when it contains a comma or a double
 * quote, escaping embedded quotes with a backslash; otherwise returns it as-is.
 */
export function quoteHeader(part) {
  const needsQuoting = part.includes(",") || part.includes('"');
  if (!needsQuoting) {
    return part;
  }
  return `"${part.replace(/"/g, '\\"')}"`;
}
|
||||
Generated
Vendored
+27
@@ -0,0 +1,27 @@
|
||||
/**
 * Splits `value` on `delimiter`, then rejoins the pieces into groups each
 * containing `numDelimiters` segments (the final group may hold fewer).
 * @throws Error when numDelimiters is not a positive integer.
 */
export function splitEvery(value, delimiter, numDelimiters) {
  if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) {
    throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery.");
  }
  const segments = value.split(delimiter);
  if (numDelimiters === 1) {
    return segments;
  }
  const grouped = [];
  let pending = "";
  segments.forEach((segment, index) => {
    // Rebuild the group, restoring the delimiters that split() consumed.
    pending = pending === "" ? segment : pending + delimiter + segment;
    if ((index + 1) % numDelimiters === 0) {
      grouped.push(pending);
      pending = "";
    }
  });
  if (pending !== "") {
    grouped.push(pending);
  }
  return grouped;
}
|
||||
Generated
Vendored
+37
@@ -0,0 +1,37 @@
|
||||
/**
 * Splits an HTTP header value on commas while honoring double-quoted
 * sections (commas inside quotes are preserved). Each element is trimmed,
 * surrounding quotes are stripped, and escaped quotes (\") are unescaped.
 */
export const splitHeader = (value) => {
  const length = value.length;
  const parts = [];
  let insideQuotes = false;
  let previous;
  let start = 0;
  for (let i = 0; i < length; ++i) {
    const current = value[i];
    if (current === `"`) {
      // A backslash-escaped quote does not toggle the quoted state.
      if (previous !== "\\") {
        insideQuotes = !insideQuotes;
      }
    } else if (current === "," && !insideQuotes) {
      parts.push(value.slice(start, i));
      start = i + 1;
    }
    previous = current;
  }
  parts.push(value.slice(start));
  return parts.map((part) => {
    let trimmed = part.trim();
    const n = trimmed.length;
    // Too short to be a quoted token; return as-is (no unescaping either).
    if (n < 2) {
      return trimmed;
    }
    if (trimmed[0] === `"` && trimmed[n - 1] === `"`) {
      trimmed = trimmed.slice(1, n - 1);
    }
    return trimmed.replace(/\\"/g, '"');
  });
};
|
||||
Generated
Vendored
+39
@@ -0,0 +1,39 @@
|
||||
/**
 * An exact numeric value kept in its original string form (e.g. for
 * bigDecimal serialization, where Number would lose precision).
 * The constructor validates the string: an optional leading "-", digits,
 * and at most one decimal point.
 */
export class NumericValue {
  constructor(string, type) {
    this.string = string;
    this.type = type;
    let seenDecimalPoint = false;
    for (let i = 0; i < string.length; ++i) {
      const ch = string[i];
      if (ch === "-" && i === 0) {
        continue; // negation prefix is only legal in the first position
      }
      if (ch === ".") {
        if (seenDecimalPoint) {
          throw new Error("@smithy/core/serde - NumericValue must contain at most one decimal point.");
        }
        seenDecimalPoint = true;
        continue;
      }
      if (ch < "0" || ch > "9") {
        throw new Error(`@smithy/core/serde - NumericValue must only contain [0-9], at most one decimal point ".", and an optional negation prefix "-".`);
      }
    }
  }
  toString() {
    return this.string;
  }
  [Symbol.hasInstance](object) {
    if (!object || typeof object !== "object") {
      return false;
    }
    const candidate = object;
    // Structural check: matching fields plus a constructor named NumericValue.
    return typeof candidate.string === "string" && typeof candidate.type === "string" && candidate.constructor?.name === "NumericValue";
  }
}

/** Convenience factory producing a bigDecimal-typed NumericValue. */
export function nv(input) {
  return new NumericValue(String(input), "bigDecimal");
}
|
||||
Generated
Vendored
+13
@@ -0,0 +1,13 @@
|
||||
/**
 * Identity-provider registry keyed by auth scheme id. Config entries whose
 * value is undefined are skipped entirely (a lookup then yields undefined).
 */
export class DefaultIdentityProviderConfig {
  constructor(config) {
    this.authSchemes = new Map();
    Object.entries(config).forEach(([schemeId, provider]) => {
      if (provider !== undefined) {
        this.authSchemes.set(schemeId, provider);
      }
    });
  }
  /** Returns the provider registered for schemeId, or undefined. */
  getIdentityProvider(schemeId) {
    return this.authSchemes.get(schemeId);
  }
}
|
||||
Generated
Vendored
+34
@@ -0,0 +1,34 @@
|
||||
import { HttpRequest } from "@smithy/protocol-http";
|
||||
import { HttpApiKeyAuthLocation } from "@smithy/types";
|
||||
/**
 * Signs requests with an API key, placed either in the query string or in a
 * header (optionally prefixed with a scheme), per the signing properties.
 */
export class HttpApiKeyAuthSigner {
  async sign(httpRequest, identity, signingProperties) {
    // Validate every required input before touching the request.
    if (!signingProperties) {
      throw new Error("request could not be signed with `apiKey` since the `name` and `in` signer properties are missing");
    }
    if (!signingProperties.name) {
      throw new Error("request could not be signed with `apiKey` since the `name` signer property is missing");
    }
    if (!signingProperties.in) {
      throw new Error("request could not be signed with `apiKey` since the `in` signer property is missing");
    }
    if (!identity.apiKey) {
      throw new Error("request could not be signed with `apiKey` since the `apiKey` is not defined");
    }
    // Clone first so the caller's request object is never mutated.
    const signedRequest = HttpRequest.clone(httpRequest);
    switch (signingProperties.in) {
      case HttpApiKeyAuthLocation.QUERY:
        signedRequest.query[signingProperties.name] = identity.apiKey;
        break;
      case HttpApiKeyAuthLocation.HEADER:
        signedRequest.headers[signingProperties.name] = signingProperties.scheme
          ? `${signingProperties.scheme} ${identity.apiKey}`
          : identity.apiKey;
        break;
      default:
        throw new Error("request can only be signed with `apiKey` locations `query` or `header`, " +
          "but found: `" +
          signingProperties.in +
          "`");
    }
    return signedRequest;
  }
}
|
||||
Generated
Vendored
+11
@@ -0,0 +1,11 @@
|
||||
import { HttpRequest } from "@smithy/protocol-http";
|
||||
/**
 * Signs requests with an HTTP Bearer token by setting the Authorization header
 * on a clone of the request (the original is never mutated).
 */
export class HttpBearerAuthSigner {
  async sign(httpRequest, identity, signingProperties) {
    const signedRequest = HttpRequest.clone(httpRequest);
    if (!identity.token) {
      throw new Error("request could not be signed with `token` since the `token` is not defined");
    }
    signedRequest.headers["Authorization"] = `Bearer ${identity.token}`;
    return signedRequest;
  }
}
||||
Generated
Vendored
+3
@@ -0,0 +1,3 @@
|
||||
// Barrel file: aggregates the built-in HTTP auth scheme signers.
export * from "./httpApiKeyAuth";
export * from "./httpBearerAuth";
export * from "./noAuth";
|
||||
Generated
Vendored
+5
@@ -0,0 +1,5 @@
|
||||
/**
 * No-op signer for the `noAuth` scheme: returns the request unchanged.
 * The identity and signingProperties parameters are accepted to satisfy the
 * common signer call shape but are ignored.
 */
export class NoAuthSigner {
  async sign(httpRequest, identity, signingProperties) {
    return httpRequest;
  }
}
|
||||
Generated
Vendored
+3
@@ -0,0 +1,3 @@
|
||||
// Barrel file: public surface of the identity-and-auth utilities.
export * from "./DefaultIdentityProviderConfig";
export * from "./httpAuthSchemes";
export * from "./memoizeIdentityProvider";
|
||||
Generated
Vendored
+53
@@ -0,0 +1,53 @@
|
||||
/**
 * Builds a predicate reporting an identity as expired when its expiration
 * falls within `expirationMs` milliseconds from now. Identities without an
 * expiration are never expired.
 */
export const createIsIdentityExpiredFunction = (expirationMs) => {
  return (identity) => {
    if (!doesIdentityRequireRefresh(identity)) {
      return false;
    }
    return identity.expiration.getTime() - Date.now() < expirationMs;
  };
};

// Treat identities as expired five minutes before they actually expire.
export const EXPIRATION_MS = 300000;

/** Default expiry check using the five-minute window above. */
export const isIdentityExpired = createIsIdentityExpiredFunction(EXPIRATION_MS);

/** An identity only needs refreshing if it carries an expiration. */
export const doesIdentityRequireRefresh = (identity) => identity.expiration !== undefined;
||||
/**
 * Memoizes an identity provider so repeated and concurrent calls share one
 * in-flight resolution. Behavior depends on the arguments:
 *  - provider undefined: returns undefined (nothing to memoize);
 *  - provider is a plain value (not a function): it is wrapped in an async
 *    function first;
 *  - isExpired undefined: the result is cached until options.forceRefresh;
 *  - otherwise: the cache is re-resolved when isExpired(resolved) is true,
 *    and caching becomes permanent once requiresRefresh(resolved) is false.
 */
export const memoizeIdentityProvider = (provider, isExpired, requiresRefresh) => {
  if (provider === undefined) {
    return undefined;
  }
  // Accept either an identity value or an identity-provider function.
  const normalizedProvider = typeof provider !== "function" ? async () => Promise.resolve(provider) : provider;
  let resolved;           // last successfully resolved identity
  let pending;            // in-flight resolution shared by concurrent callers
  let hasResult;          // set once at least one resolution has succeeded
  let isConstant = false; // set once the identity no longer needs refreshing
  // Coalesces concurrent calls into a single provider invocation.
  const coalesceProvider = async (options) => {
    if (!pending) {
      pending = normalizedProvider(options);
    }
    try {
      resolved = await pending;
      hasResult = true;
      isConstant = false;
    }
    finally {
      // Clear the slot on success or failure so the next call starts fresh.
      pending = undefined;
    }
    return resolved;
  };
  if (isExpired === undefined) {
    // No expiry notion: resolve once and reuse, unless forceRefresh is set.
    return async (options) => {
      if (!hasResult || options?.forceRefresh) {
        resolved = await coalesceProvider(options);
      }
      return resolved;
    };
  }
  return async (options) => {
    if (!hasResult || options?.forceRefresh) {
      resolved = await coalesceProvider(options);
    }
    if (isConstant) {
      return resolved;
    }
    if (!requiresRefresh(resolved)) {
      // Identity can never expire; skip all future checks.
      isConstant = true;
      return resolved;
    }
    if (isExpired(resolved)) {
      // Refresh; coalesceProvider reassigns `resolved` before we return it.
      await coalesceProvider(options);
      return resolved;
    }
    return resolved;
  };
};
|
||||
+5
@@ -0,0 +1,5 @@
|
||||
import { HandlerExecutionContext } from "@smithy/types";
/**
 * Extracts a mutable Record<string, unknown> of Smithy context values from the
 * given handler execution context.
 * @internal
 */
export declare const getSmithyContext: (context: HandlerExecutionContext) => Record<string, unknown>;
|
||||
+8
@@ -0,0 +1,8 @@
|
||||
// Barrel file: public exports of the core package.
export * from "./getSmithyContext";
export * from "./middleware-http-auth-scheme";
export * from "./middleware-http-signing";
export * from "./normalizeProvider";
export { createPaginator } from "./pagination/createPaginator";
export * from "./protocols/requestBuilder";
export * from "./setFeature";
export * from "./util-identity-and-auth";
|
||||
Generated
Vendored
+18
@@ -0,0 +1,18 @@
|
||||
import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types";
import { PreviouslyResolved } from "./httpAuthSchemeMiddleware";
/**
 * Registration options (step/placement) for the endpoint-rule-set variant of
 * the HTTP auth scheme middleware.
 * @internal
 */
export declare const httpAuthSchemeEndpointRuleSetMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions;
/**
 * Providers the plugin requires: one deriving auth scheme parameters from the
 * operation input/context, and one building the IdentityProviderConfig from
 * the client configuration.
 * @internal
 */
interface HttpAuthSchemeEndpointRuleSetPluginOptions<TConfig extends object, TContext extends HandlerExecutionContext, TParameters extends HttpAuthSchemeParameters, TInput extends object> {
    httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider<TConfig, TContext, TParameters, TInput>;
    identityProviderConfigProvider: (config: TConfig) => Promise<IdentityProviderConfig>;
}
/**
 * Builds the client Pluggable that applies the HTTP auth scheme middleware
 * using the supplied providers and resolved config.
 * @internal
 */
export declare const getHttpAuthSchemeEndpointRuleSetPlugin: <TConfig extends object, TContext extends HandlerExecutionContext, TParameters extends HttpAuthSchemeParameters, TInput extends object>(config: TConfig & PreviouslyResolved<TParameters>, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemeEndpointRuleSetPluginOptions<TConfig, TContext, TParameters, TInput>) => Pluggable<any, any>;
export {};
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user