Create
Creates a job.
- TypeScript
- Python
import {
cloudApi,
decodeMessage,
serviceClients,
Session,
waitForOperation,
} from "@yandex-cloud/nodejs-sdk";
// Aliases for the generated DataSphere Jobs API messages and enums,
// resolved from the SDK's cloudApi namespace.
const CreateProjectJobRequest =
cloudApi.datasphere.jobs_project_job_service.CreateProjectJobRequest;
const CreateProjectJobResponse =
cloudApi.datasphere.jobs_project_job_service.CreateProjectJobResponse;
const ExtendedWorkingStorage_StorageType =
cloudApi.datasphere.jobs_jobs.ExtendedWorkingStorage_StorageType;
const FileCompressionType = cloudApi.datasphere.jobs_jobs.FileCompressionType;
// Creates a DataSphere project job, waits for the long-running operation to
// finish, then decodes and prints the response.
(async () => {
// NOTE(review): assumes YC_OAUTH_TOKEN is set in the environment; otherwise
// Session receives an undefined token — confirm the intended failure mode.
const authToken = process.env["YC_OAUTH_TOKEN"];
const session = new Session({ oauthToken: authToken });
const client = session.client(serviceClients.ProjectJobServiceClient);
// create() returns a long-running Operation. All request fields below are
// commented-out placeholders — uncomment and fill in the ones you need.
const operation = await client.create(
CreateProjectJobRequest.fromPartial({
// projectId: "projectId",
// jobParameters: {
// inputFiles: [{
// desc: {
// path: "path",
// var: "var"
// },
// sha256: "sha256",
// sizeBytes: 0,
// compressionType: FileCompressionType.NONE
// }],
// outputFiles: [{
// path: "path",
// var: "var"
// }],
// s3MountIds: ["s3MountIds"],
// datasetIds: ["datasetIds"],
// cmd: "cmd",
// env: {
// vars: {"key": "vars"},
// dockerImageResourceId: "dockerImageResourceId",
// dockerImageSpec: {
// imageUrl: "imageUrl",
// username: "username",
// passwordPlainText: "passwordPlainText",
// passwordDsSecretName: "passwordDsSecretName"
// },
// pythonEnv: {
// condaYaml: "condaYaml",
// localModules: [{
// desc: {
// path: "path",
// var: "var"
// },
// sha256: "sha256",
// sizeBytes: 0,
// compressionType: FileCompressionType.NONE
// }],
// pythonVersion: "pythonVersion",
// requirements: ["requirements"],
// pipOptions: {
// indexUrl: "indexUrl",
// extraIndexUrls: ["extraIndexUrls"],
// trustedHosts: ["trustedHosts"],
// noDeps: true
// }
// }
// },
// attachProjectDisk: true,
// cloudInstanceTypes: [{
// name: "name"
// }],
// extendedWorkingStorage: {
// type: ExtendedWorkingStorage_StorageType.SSD,
// sizeGb: 0
// },
// arguments: [{
// name: "name",
// value: "value"
// }],
// outputDatasets: [{
// name: "name",
// description: "description",
// labels: {"key": "labels"},
// sizeGb: 0,
// var: "var"
// }],
// gracefulShutdownParameters: {
// timeout: {
// seconds: 0,
// nanos: 0
// },
// signal: 0
// }
// },
// config: "config",
// name: "name",
// desc: "desc",
// dataTtl: {
// seconds: 0,
// nanos: 0
// }
})
);
// Block until the operation reaches a terminal state (success or failure).
const finishedOp = await waitForOperation(operation, session);
if (finishedOp.response) {
// The response is a packed Any; decode it into CreateProjectJobResponse.
const result = decodeMessage<typeof CreateProjectJobResponse>(
finishedOp.response
);
console.log(result);
}
})();
import os
import grpc
import yandexcloud
from yandex.cloud.datasphere.v2.jobs.jobs_pb2 import Argument
from yandex.cloud.datasphere.v2.jobs.jobs_pb2 import CloudInstanceType
from yandex.cloud.datasphere.v2.jobs.project_job_service_pb2 import CreateProjectJobMetadata
from yandex.cloud.datasphere.v2.jobs.project_job_service_pb2 import CreateProjectJobRequest
from yandex.cloud.datasphere.v2.jobs.project_job_service_pb2 import CreateProjectJobResponse
from yandex.cloud.datasphere.v2.jobs.jobs_pb2 import DockerImageSpec
from yandex.cloud.datasphere.v2.jobs.jobs_pb2 import Environment
from yandex.cloud.datasphere.v2.jobs.jobs_pb2 import ExtendedWorkingStorage
from yandex.cloud.datasphere.v2.jobs.jobs_pb2 import File
from yandex.cloud.datasphere.v2.jobs.jobs_pb2 import FileCompressionType
from yandex.cloud.datasphere.v2.jobs.jobs_pb2 import FileDesc
from yandex.cloud.datasphere.v2.jobs.jobs_pb2 import GracefulShutdownParameters
from yandex.cloud.datasphere.v2.jobs.jobs_pb2 import JobParameters
from yandex.cloud.datasphere.v2.jobs.jobs_pb2 import OutputDatasetDesc
from yandex.cloud.datasphere.v2.jobs.jobs_pb2 import PipOptions
from yandex.cloud.datasphere.v2.jobs.project_job_service_pb2_grpc import ProjectJobServiceStub
from yandex.cloud.datasphere.v2.jobs.jobs_pb2 import PythonEnv
# Creates a DataSphere project job via gRPC, waits for the long-running
# operation to finish, then prints the decoded result.
# NOTE(review): assumes YC_OAUTH_TOKEN is set; otherwise token is None —
# confirm the intended failure mode.
token = os.getenv("YC_OAUTH_TOKEN")
sdk = yandexcloud.SDK(token=token)
service = sdk.client(ProjectJobServiceStub)
# Create() returns a long-running Operation. All request fields below are
# commented-out placeholders — uncomment and fill in the ones you need.
# NOTE: uncommenting data_ttl or graceful_shutdown_parameters.timeout also
# requires `from google.protobuf.duration_pb2 import Duration`, which is not
# imported above.
operation = service.Create(
CreateProjectJobRequest(
# project_id = "projectId",
# job_parameters = JobParameters(
# input_files = [File(
# desc = FileDesc(
# path = "path",
# var = "var"
# ),
# sha_256 = "sha256",
# size_bytes = 0,
# compression_type = FileCompressionType.NONE
# )],
# output_files = [FileDesc(
# path = "path",
# var = "var"
# )],
# s_3_mount_ids = ["s3MountIds"],
# dataset_ids = ["datasetIds"],
# cmd = "cmd",
# env = Environment(
# vars = {"key": "vars"},
# docker_image_resource_id = "dockerImageResourceId",
# docker_image_spec = DockerImageSpec(
# image_url = "imageUrl",
# username = "username",
# password_plain_text = "passwordPlainText",
# password_ds_secret_name = "passwordDsSecretName"
# ),
# python_env = PythonEnv(
# conda_yaml = "condaYaml",
# local_modules = [File(
# desc = FileDesc(
# path = "path",
# var = "var"
# ),
# sha_256 = "sha256",
# size_bytes = 0,
# compression_type = FileCompressionType.NONE
# )],
# python_version = "pythonVersion",
# requirements = ["requirements"],
# pip_options = PipOptions(
# index_url = "indexUrl",
# extra_index_urls = ["extraIndexUrls"],
# trusted_hosts = ["trustedHosts"],
# no_deps = True
# )
# )
# ),
# attach_project_disk = True,
# cloud_instance_types = [CloudInstanceType(
# name = "name"
# )],
# extended_working_storage = ExtendedWorkingStorage(
# type = ExtendedWorkingStorage.StorageType.SSD,
# size_gb = 0
# ),
# arguments = [Argument(
# name = "name",
# value = "value"
# )],
# output_datasets = [OutputDatasetDesc(
# name = "name",
# description = "description",
# labels = {"key": "labels"},
# size_gb = 0,
# var = "var"
# )],
# graceful_shutdown_parameters = GracefulShutdownParameters(
# timeout = Duration(
# seconds = 0,
# nanos = 0
# ),
# signal = 0
# )
# ),
# config = "config",
# name = "name",
# desc = "desc",
# data_ttl = Duration(
# seconds = 0,
# nanos = 0
# )
)
)
# Block until the operation reaches a terminal state, decoding the packed
# response and metadata into their concrete message types.
operation_result = sdk.wait_operation_and_get_result(
operation,
response_type=CreateProjectJobResponse,
meta_type=CreateProjectJobMetadata,
)
print(operation_result)
CreateProjectJobRequest
projectId
: string
ID of the project.
jobParameters
: JobParameters
Parameters of the job.
config
: string
Config of the job.
name
: string
Name of the job.
desc
: string
Description of the job.
dataTtl
: google.protobuf.Duration
Job data TTL.
JobParameters
Job parameters.
inputFiles
: File
List of input files.
outputFiles
: FileDesc
List of output files descriptions.
s3MountIds
: string
List of DataSphere S3 mount ids.
datasetIds
: string
List of DataSphere dataset ids.
cmd
: string
Job run command.
env
: Environment
Job environment description.
attachProjectDisk
: bool
Should project disk be attached to VM.
cloudInstanceTypes
: CloudInstanceType
VM specification.
extendedWorkingStorage
: ExtendedWorkingStorage
Extended working storage configuration.
arguments
: Argument
List of literal arguments.
outputDatasets
: OutputDatasetDesc
List of DataSets descriptions to create.
gracefulShutdownParameters
: GracefulShutdownParameters
Graceful shutdown settings.
File
desc
: FileDesc
sha256
: string
SHA256 of the file.
sizeBytes
: int64
File size in bytes.
compressionType
: FileCompressionType
File compression info
FileDesc
path
: string
Path of the file on filesystem.
var
: string
Variable to use in cmd substitution.
Environment
vars
: string
Environment variables.
One of dockerImage
pythonEnv
: PythonEnv
CloudInstanceType
name
: string
Name of DataSphere VM configuration.
ExtendedWorkingStorage
Extended working storage configuration.
StorageType
STORAGE_TYPE_UNSPECIFIED
SSD
type
: StorageType
sizeGb
: int64
Argument
name
: string
value
: string
OutputDatasetDesc
name
: string
Name to create dataset with
description
: string
Description to show in UI
labels
: string
sizeGb
: int64
Size of dataset to create
var
: string
Var name to replace in cmd, like in FileDesc
GracefulShutdownParameters
timeout
: google.protobuf.Duration
signal
: int64
default 15 (SIGTERM)
DockerImageSpec
imageUrl
: string
Docker image URL.
username
: string
Username for container registry.
One of password
Password for container registry.
passwordPlainText
: string
Plaintext password.
passwordDsSecretName
: string
ID of DataSphere secret containing password.
PythonEnv
condaYaml
: string
Conda YAML.
localModules
: File
List of local modules descriptions.
pythonVersion
: string
Python version reduced to major.minor
requirements
: string
List of pip requirements
pipOptions
: PipOptions
Pip install options
PipOptions
indexUrl
: string
--index-url option
extraIndexUrls
: string
--extra-index-urls option
trustedHosts
: string
--trusted-hosts option
noDeps
: bool
--no-deps option
Operation
An Operation resource. For more information, see Operation.
id
: string
ID of the operation.
description
: string
Description of the operation. 0-256 characters long.
createdAt
: google.protobuf.Timestamp
Creation timestamp.
createdBy
: string
ID of the user or service account who initiated the operation.
modifiedAt
: google.protobuf.Timestamp
The time when the Operation resource was last modified.
done
: bool
If the value is false
, it means the operation is still in progress.
If true
, the operation is completed, and either error
or response
is available.
metadata
: google.protobuf.Any
Service-specific metadata associated with the operation. It typically contains the ID of the target resource that the operation is performed on. Any method that returns a long-running operation should document the metadata type, if any.
One of result
The operation result.
If done == false
and there was no failure detected, neither error
nor response
is set.
If done == false
and there was a failure detected, error
is set.
If done == true
, exactly one of error
or response
is set.
error
: google.rpc.Status
The error result of the operation in case of failure or cancellation.
response
: google.protobuf.Any
The normal response of the operation in case of success.
If the original method returns no data on success, such as Delete, the response is google.protobuf.Empty. If the original method is the standard Create/Update, the response should be the target resource of the operation. Any method that returns a long-running operation should document the response type, if any.