feat(sdk): add Flutter/Dart SDK for Synor Compute
Complete SDK implementation for Flutter and Dart applications: lib/src/types.dart: - Precision, ProcessorType, Priority, JobStatus enums - SynorConfig for client configuration - MatMulOptions, Conv2dOptions, AttentionOptions, InferenceOptions - PricingInfo and UsageStats data classes - SynorException for error handling lib/src/tensor.dart: - Full Tensor class with shape, dtype, and data - Factory constructors: zeros, ones, rand, randn, eye, linspace, arange - Operations: reshape, transpose, flatten - Statistics: sum, mean, std, min, max, argmin, argmax - Element-wise: add, sub, mul, div, scalar ops - Activations: relu, sigmoid, tanh, softmax - JSON serialization with base64-encoded binary data lib/src/job.dart: - JobResult with status, result, timing, and cost - Job class with WebSocket streaming and HTTP polling - JobStatusUpdate for real-time progress tracking - JobBatch for parallel job management lib/src/client.dart: - SynorCompute main client - Operations: matmul, conv2d, attention, elementwise, reduce - LLM inference with streaming support - Tensor upload/download/delete - Job management: submit, cancel, list - Pricing and usage statistics Platform support: Android, iOS, Linux, macOS, Web, Windows
This commit is contained in:
parent
a808bb37a6
commit
62ec3c92da
8 changed files with 2217 additions and 0 deletions
84
sdk/flutter/analysis_options.yaml
Normal file
84
sdk/flutter/analysis_options.yaml
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
include: package:flutter_lints/flutter.yaml

linter:
  rules:
    # Style rules
    - always_declare_return_types
    - avoid_empty_else
    - avoid_relative_lib_imports
    - avoid_returning_null_for_void
    - avoid_slow_async_io
    - avoid_types_as_parameter_names
    - avoid_unused_constructor_parameters
    - await_only_futures
    - camel_case_types
    - cancel_subscriptions
    - close_sinks
    - constant_identifier_names
    - control_flow_in_finally
    - curly_braces_in_flow_control_structures
    - empty_catches
    - empty_constructor_bodies
    - empty_statements
    - hash_and_equals
    - implementation_imports
    - library_names
    - library_prefixes
    - no_duplicate_case_values
    - non_constant_identifier_names
    - null_closures
    - overridden_fields
    - package_names
    - package_prefixed_library_names
    - prefer_adjacent_string_concatenation
    - prefer_collection_literals
    - prefer_conditional_assignment
    - prefer_const_constructors
    - prefer_const_declarations
    - prefer_contains
    - prefer_final_fields
    - prefer_final_locals
    - prefer_for_elements_to_map_fromIterable
    - prefer_generic_function_type_aliases
    - prefer_if_null_operators
    - prefer_initializing_formals
    - prefer_inlined_adds
    - prefer_interpolation_to_compose_strings
    - prefer_is_empty
    - prefer_is_not_empty
    - prefer_iterable_whereType
    - prefer_single_quotes
    - prefer_spread_collections
    - prefer_typing_uninitialized_variables
    - recursive_getters
    - slash_for_doc_comments
    - sort_child_properties_last
    - test_types_in_equals
    - throw_in_finally
    - type_init_formals
    - unawaited_futures
    - unnecessary_brace_in_string_interps
    - unnecessary_const
    - unnecessary_getters_setters
    - unnecessary_new
    - unnecessary_null_in_if_null_operators
    - unnecessary_overrides
    - unnecessary_parenthesis
    - unnecessary_statements
    - unnecessary_string_escapes
    - unnecessary_string_interpolations
    - unnecessary_this
    - unrelated_type_equality_checks
    - use_function_type_syntax_for_parameters
    - use_rethrow_when_possible
    - valid_regexps
    - void_checks

analyzer:
  # Skip generated sources.
  exclude:
    - "**/*.g.dart"
    - "**/*.freezed.dart"
  errors:
    missing_required_param: error
    missing_return: error
    todo: ignore
|
||||
178
sdk/flutter/example/example.dart
Normal file
178
sdk/flutter/example/example.dart
Normal file
|
|
@ -0,0 +1,178 @@
|
|||
import 'dart:io';
|
||||
|
||||
import 'package:synor_compute/synor_compute.dart';
|
||||
|
||||
/// Example usage of Synor Compute SDK for Flutter/Dart
|
||||
/// Example usage of Synor Compute SDK for Flutter/Dart.
void main() async {
  // Build the client; the API key comes from the environment with a
  // placeholder fallback. Defaults below apply to every request that
  // does not override them.
  final client = SynorCompute(
    apiKey: Platform.environment['SYNOR_API_KEY'] ?? 'your-api-key',
    defaultProcessor: ProcessorType.auto,
    defaultPrecision: Precision.fp32,
    defaultPriority: Priority.normal,
  );

  try {
    // Verify the service is reachable before running the demos.
    final isHealthy = await client.healthCheck();
    print('Service healthy: $isHealthy\n');

    // Run each demo in sequence.
    await matrixMultiplicationExample(client);
    await tensorOperationsExample(client);
    await llmInferenceExample(client);
    await streamingInferenceExample(client);
    await pricingExample(client);
  } finally {
    // Release HTTP resources even if a demo throws.
    client.dispose();
  }
}
|
||||
|
||||
/// Matrix multiplication example
|
||||
/// Demonstrates a remote matrix multiplication on the GPU.
Future<void> matrixMultiplicationExample(SynorCompute client) async {
  print('=== Matrix Multiplication ===');

  // Two random matrices with compatible inner dimensions (512).
  final a = Tensor.rand([256, 512]);
  final b = Tensor.rand([512, 256]);

  print('A: ${a.shape}');
  print('B: ${b.shape}');

  // Execute remotely: FP16 precision, GPU, high priority.
  final result = await client.matmul(
    a,
    b,
    options: MatMulOptions(
      precision: Precision.fp16,
      processor: ProcessorType.gpu,
      priority: Priority.high,
    ),
  );

  if (!result.isSuccess) {
    print('Error: ${result.error}');
  } else {
    print('Result: ${result.result!.shape}');
    print('Execution time: ${result.executionTimeMs}ms');
    print('Cost: \$${result.cost?.toStringAsFixed(6)}');
    print('Processor: ${result.processor?.value}');
  }
  print('');
}
|
||||
|
||||
/// Local tensor operations example
|
||||
/// Demonstrates local (client-side) tensor construction and math.
Future<void> tensorOperationsExample(SynorCompute client) async {
  print('=== Tensor Operations ===');

  // Sample a standard normal distribution and report its statistics.
  final x = Tensor.randn([100], mean: 0.0, std: 1.0);
  print('Random normal tensor: mean=${x.mean().toStringAsFixed(4)}, '
      'std=${x.std().toStringAsFixed(4)}');

  // 4x4 identity matrix.
  final eye = Tensor.eye(4);
  print('Identity matrix:\n${eye.toNestedList()}');

  // Five evenly spaced points over [0, 10].
  final linspace = Tensor.linspace(0, 10, 5);
  print('Linspace [0, 10, 5]: ${linspace.toNestedList()}');

  // Build a 3x4 matrix from a flat integer range, then transpose it.
  final matrix = Tensor.arange(0, 12).reshape([3, 4]);
  print('Reshaped [0..12] to [3,4]:\n${matrix.toNestedList()}');

  final transposed = matrix.transpose();
  print('Transposed to ${transposed.shape}');

  // Element-wise activation functions.
  final input = Tensor(shape: [5], data: [-2.0, -1.0, 0.0, 1.0, 2.0]);
  print('ReLU of $input: ${input.relu().toNestedList()}');
  print('Sigmoid of $input: ${input.sigmoid().toNestedList()}');

  // Softmax over a small logit vector.
  final logits = Tensor(shape: [4], data: [1.0, 2.0, 3.0, 4.0]);
  print('Softmax of $logits: ${logits.softmax().toNestedList()}');

  print('');
}
|
||||
|
||||
/// LLM inference example
|
||||
/// Demonstrates blocking LLM inference.
Future<void> llmInferenceExample(SynorCompute client) async {
  print('=== LLM Inference ===');

  // Short, low-temperature completion on the LPU (LLM-optimized).
  final result = await client.inference(
    'llama-3-70b',
    'What is the capital of France? Answer in one word.',
    options: InferenceOptions(
      maxTokens: 10,
      temperature: 0.1,
      processor: ProcessorType.lpu, // Use LPU for LLM
    ),
  );

  if (!result.isSuccess) {
    print('Error: ${result.error}');
  } else {
    print('Response: ${result.result}');
    print('Time: ${result.executionTimeMs}ms');
  }
  print('');
}
|
||||
|
||||
/// Streaming inference example
|
||||
/// Demonstrates token-by-token streaming inference.
Future<void> streamingInferenceExample(SynorCompute client) async {
  print('=== Streaming Inference ===');
  print('Response: ');

  final tokens = client.inferenceStream(
    'llama-3-70b',
    'Write a short poem about distributed computing.',
    options: InferenceOptions(
      maxTokens: 100,
      temperature: 0.7,
    ),
  );

  // Write each token to stdout as soon as it arrives.
  await for (final token in tokens) {
    stdout.write(token);
  }

  print('\n');
}
|
||||
|
||||
/// Pricing and usage example
|
||||
/// Demonstrates pricing and account-usage queries.
Future<void> pricingExample(SynorCompute client) async {
  print('=== Pricing Information ===');

  final pricing = await client.getPricing();

  print('Current spot prices:');
  for (final p in pricing) {
    print('  ${p.processor.value.toUpperCase().padRight(8)}: '
        '\$${p.pricePerSecond.toStringAsFixed(6)}/sec, '
        '${p.availableUnits} units available, '
        '${p.utilizationPercent.toStringAsFixed(1)}% utilized');
  }

  print('');

  // Account-level usage statistics.
  final usage = await client.getUsage();
  print('Usage Statistics:');
  print('  Total jobs: ${usage.totalJobs}');
  print('  Completed: ${usage.completedJobs}');
  print('  Failed: ${usage.failedJobs}');
  print('  Total compute time: ${usage.totalComputeSeconds.toStringAsFixed(2)}s');
  print('  Total cost: \$${usage.totalCost.toStringAsFixed(4)}');
  print('');
}
|
||||
541
sdk/flutter/lib/src/client.dart
Normal file
541
sdk/flutter/lib/src/client.dart
Normal file
|
|
@ -0,0 +1,541 @@
|
|||
/// Main client for Synor Compute SDK
|
||||
library synor_compute.client;
|
||||
|
||||
import 'dart:async';
|
||||
import 'dart:convert';
|
||||
|
||||
import 'package:http/http.dart' as http;
|
||||
|
||||
import 'job.dart';
|
||||
import 'tensor.dart';
|
||||
import 'types.dart';
|
||||
|
||||
/// Main client for interacting with Synor Compute
|
||||
/// Main client for interacting with Synor Compute.
///
/// Provides remote tensor operations (matmul, conv2d, attention,
/// element-wise, reduce), LLM inference (blocking and streaming),
/// tensor storage, job management, and pricing/usage queries.
/// Call [dispose] when finished to release the underlying HTTP client.
class SynorCompute {
  final SynorConfig _config;
  final http.Client _httpClient;

  // Flipped by [dispose]; guards every public entry point.
  bool _isDisposed = false;

  /// Creates a new Synor Compute client.
  ///
  /// [apiKey] authenticates every request. The remaining parameters set
  /// the endpoint, request timeout, and per-request defaults applied when
  /// an operation's options omit them. An [httpClient] may be injected
  /// for testing.
  ///
  /// NOTE(review): [maxRetries] is stored in the config but no retry
  /// loop is implemented in this client yet — confirm intended behavior.
  SynorCompute({
    required String apiKey,
    String baseUrl = 'https://compute.synor.io',
    Duration timeout = const Duration(seconds: 30),
    int maxRetries = 3,
    ProcessorType defaultProcessor = ProcessorType.auto,
    Precision defaultPrecision = Precision.fp32,
    Priority defaultPriority = Priority.normal,
    http.Client? httpClient,
  })  : _config = SynorConfig(
          apiKey: apiKey,
          baseUrl: baseUrl,
          timeout: timeout,
          maxRetries: maxRetries,
          defaultProcessor: defaultProcessor,
          defaultPrecision: defaultPrecision,
          defaultPriority: defaultPriority,
        ),
        _httpClient = httpClient ?? http.Client();

  /// Creates a client from an existing [SynorConfig].
  SynorCompute.fromConfig(SynorConfig config, {http.Client? httpClient})
      : _config = config,
        _httpClient = httpClient ?? http.Client();

  /// Headers attached to every request.
  Map<String, String> get _headers => {
        'Authorization': 'Bearer ${_config.apiKey}',
        'Content-Type': 'application/json',
        'Accept': 'application/json',
        'X-SDK-Version': 'flutter/0.1.0',
      };

  /// Throws a [StateError] if [dispose] has already been called.
  void _checkDisposed() {
    if (_isDisposed) {
      throw StateError('Client has been disposed');
    }
  }

  /// Performs matrix multiplication of [a] and [b] remotely.
  ///
  /// Options missing from [options] fall back to the client defaults.
  /// Returns a [JobResult] holding the product tensor on success.
  Future<JobResult<Tensor>> matmul(
    Tensor a,
    Tensor b, {
    MatMulOptions? options,
  }) async {
    _checkDisposed();

    final opts = options ?? const MatMulOptions();
    final body = {
      'operation': 'matmul',
      'inputs': {
        'a': a.toJson(),
        'b': b.toJson(),
      },
      'options': {
        'precision': (opts.precision ?? _config.defaultPrecision).value,
        'processor': (opts.processor ?? _config.defaultProcessor).value,
        'priority': (opts.priority ?? _config.defaultPriority).value,
        'transpose_a': opts.transposeA,
        'transpose_b': opts.transposeB,
      },
    };

    return _submitAndWait<Tensor>(
      body,
      (result) => Tensor.fromJson(result as Map<String, dynamic>),
    );
  }

  /// Performs a 2D convolution of [input] with [kernel] remotely.
  Future<JobResult<Tensor>> conv2d(
    Tensor input,
    Tensor kernel, {
    Conv2dOptions? options,
  }) async {
    _checkDisposed();

    final opts = options ?? const Conv2dOptions();
    final body = {
      'operation': 'conv2d',
      'inputs': {
        'input': input.toJson(),
        'kernel': kernel.toJson(),
      },
      'options': {
        // Spread first so the normalized precision/processor/priority
        // values below take precedence over opts.toJson().
        ...opts.toJson(),
        'precision': (opts.precision ?? _config.defaultPrecision).value,
        'processor': (opts.processor ?? _config.defaultProcessor).value,
        'priority': (opts.priority ?? _config.defaultPriority).value,
      },
    };

    return _submitAndWait<Tensor>(
      body,
      (result) => Tensor.fromJson(result as Map<String, dynamic>),
    );
  }

  /// Performs flash attention over [query], [key], and [value] remotely.
  Future<JobResult<Tensor>> attention(
    Tensor query,
    Tensor key,
    Tensor value, {
    required AttentionOptions options,
  }) async {
    _checkDisposed();

    final body = {
      'operation': 'flash_attention',
      'inputs': {
        'query': query.toJson(),
        'key': key.toJson(),
        'value': value.toJson(),
      },
      'options': {
        ...options.toJson(),
        'precision': (options.precision ?? _config.defaultPrecision).value,
        'processor': (options.processor ?? _config.defaultProcessor).value,
        'priority': (options.priority ?? _config.defaultPriority).value,
      },
    };

    return _submitAndWait<Tensor>(
      body,
      (result) => Tensor.fromJson(result as Map<String, dynamic>),
    );
  }

  /// Runs LLM inference on [model] with prompt [input] and waits for
  /// the complete response text.
  Future<JobResult<String>> inference(
    String model,
    String input, {
    InferenceOptions? options,
  }) async {
    _checkDisposed();

    final opts = options ?? const InferenceOptions();
    final body = {
      'operation': 'inference',
      'model': model,
      'input': input,
      'options': {
        ...opts.toJson(),
        'processor': (opts.processor ?? _config.defaultProcessor).value,
        'priority': (opts.priority ?? _config.defaultPriority).value,
      },
    };

    return _submitAndWait<String>(
      body,
      (result) => result['text'] as String,
    );
  }

  /// Streams LLM inference token-by-token via server-sent events.
  ///
  /// Yields each `token` field as it arrives and completes when the
  /// server emits the `[DONE]` sentinel or closes the stream. Throws
  /// [SynorException] if the request is rejected.
  Stream<String> inferenceStream(
    String model,
    String input, {
    InferenceOptions? options,
  }) async* {
    _checkDisposed();

    final opts = options ?? const InferenceOptions();
    final body = {
      'operation': 'inference',
      'model': model,
      'input': input,
      'options': {
        ...opts.toJson(),
        'stream': true,
        'processor': (opts.processor ?? _config.defaultProcessor).value,
        'priority': (opts.priority ?? _config.defaultPriority).value,
      },
    };

    final request = http.Request('POST', Uri.parse('${_config.baseUrl}/stream'))
      ..headers.addAll(_headers)
      ..body = jsonEncode(body);

    final streamedResponse = await _httpClient.send(request);

    if (streamedResponse.statusCode != 200) {
      final responseBody = await streamedResponse.stream.bytesToString();
      throw SynorException(
        'Streaming request failed',
        statusCode: streamedResponse.statusCode,
        details: {'response': responseBody},
      );
    }

    // Bug fix: split on real line boundaries rather than splitting each
    // network chunk on '\n' independently. An SSE line may be fragmented
    // across chunks; LineSplitter buffers partial lines until the newline
    // arrives, so no token is dropped or mangled at a chunk boundary.
    final lines = streamedResponse.stream
        .transform(utf8.decoder)
        .transform(const LineSplitter());

    await for (final line in lines) {
      if (!line.startsWith('data: ')) continue;
      final data = line.substring(6);
      if (data == '[DONE]') return;
      try {
        final json = jsonDecode(data) as Map<String, dynamic>;
        final token = json['token'];
        if (token != null) {
          yield token as String;
        }
      } on FormatException {
        // Skip malformed JSON payloads rather than aborting the stream.
      }
    }
  }

  /// Applies the element-wise [operation] to [input], optionally with a
  /// second tensor [other] or a [scalar] operand.
  Future<JobResult<Tensor>> elementwise(
    String operation,
    Tensor input, {
    Tensor? other,
    double? scalar,
    Precision? precision,
    ProcessorType? processor,
    Priority? priority,
  }) async {
    _checkDisposed();

    final body = {
      'operation': 'elementwise',
      'op': operation,
      'inputs': {
        'input': input.toJson(),
        if (other != null) 'other': other.toJson(),
        if (scalar != null) 'scalar': scalar,
      },
      'options': {
        'precision': (precision ?? _config.defaultPrecision).value,
        'processor': (processor ?? _config.defaultProcessor).value,
        'priority': (priority ?? _config.defaultPriority).value,
      },
    };

    return _submitAndWait<Tensor>(
      body,
      (result) => Tensor.fromJson(result as Map<String, dynamic>),
    );
  }

  /// Applies the reduction [operation] (sum, mean, max, min, ...) to
  /// [input], optionally along [axes], keeping reduced dimensions when
  /// [keepDims] is true.
  Future<JobResult<Tensor>> reduce(
    String operation,
    Tensor input, {
    List<int>? axes,
    bool keepDims = false,
    Precision? precision,
    ProcessorType? processor,
    Priority? priority,
  }) async {
    _checkDisposed();

    final body = {
      'operation': 'reduce',
      'op': operation,
      'inputs': {
        'input': input.toJson(),
      },
      'options': {
        if (axes != null) 'axes': axes,
        'keep_dims': keepDims,
        'precision': (precision ?? _config.defaultPrecision).value,
        'processor': (processor ?? _config.defaultProcessor).value,
        'priority': (priority ?? _config.defaultPriority).value,
      },
    };

    return _submitAndWait<Tensor>(
      body,
      (result) => Tensor.fromJson(result as Map<String, dynamic>),
    );
  }

  /// Submits a raw [operation] payload and returns a [Job] handle for
  /// asynchronous tracking (WebSocket status updates or polling).
  Future<Job<T>> submit<T>(
    Map<String, dynamic> operation,
    T Function(dynamic) resultParser,
  ) async {
    _checkDisposed();

    final response = await _post('/jobs', operation);
    final jobId = response['job_id'] as String;

    return Job<T>(
      jobId: jobId,
      baseUrl: _config.baseUrl,
      headers: _headers,
      resultParser: resultParser,
    );
  }

  /// Fetches the current state of job [jobId].
  Future<JobResult<T>> getJob<T>(
    String jobId, {
    T Function(dynamic)? resultParser,
  }) async {
    _checkDisposed();

    final response = await _get('/jobs/$jobId');
    return JobResult<T>.fromJson(response, resultParser);
  }

  /// Requests cancellation of job [jobId].
  ///
  /// Returns true if the cancel request was accepted; false on any
  /// failure (best-effort, never throws).
  Future<bool> cancelJob(String jobId) async {
    _checkDisposed();

    try {
      await _post('/jobs/$jobId/cancel', {});
      return true;
    } catch (e) {
      return false;
    }
  }

  /// Lists jobs, optionally filtered by [status], with pagination.
  Future<List<JobResult<dynamic>>> listJobs({
    JobStatus? status,
    int limit = 20,
    int offset = 0,
  }) async {
    _checkDisposed();

    final params = <String, String>{
      'limit': limit.toString(),
      'offset': offset.toString(),
      if (status != null) 'status': status.value,
    };

    final response = await _get('/jobs', params);
    final jobs = response['jobs'] as List;
    return jobs
        .map((j) => JobResult<dynamic>.fromJson(j as Map<String, dynamic>, null))
        .toList();
  }

  /// Fetches current spot pricing for all processor types.
  Future<List<PricingInfo>> getPricing() async {
    _checkDisposed();

    final response = await _get('/pricing');
    final pricing = response['pricing'] as List;
    return pricing
        .map((p) => PricingInfo.fromJson(p as Map<String, dynamic>))
        .toList();
  }

  /// Fetches pricing for a specific [processor].
  ///
  /// Throws [SynorException] if no pricing entry exists for it.
  Future<PricingInfo> getPricingFor(ProcessorType processor) async {
    // Consistency fix: this guard was missing here although every other
    // public method has it.
    _checkDisposed();

    final allPricing = await getPricing();
    return allPricing.firstWhere(
      (p) => p.processor == processor,
      orElse: () => throw SynorException(
        'No pricing available for processor ${processor.value}',
      ),
    );
  }

  /// Fetches account usage statistics, optionally bounded to [from]/[to].
  Future<UsageStats> getUsage({DateTime? from, DateTime? to}) async {
    _checkDisposed();

    final params = <String, String>{
      if (from != null) 'from': from.toIso8601String(),
      if (to != null) 'to': to.toIso8601String(),
    };

    final response = await _get('/usage', params);
    return UsageStats.fromJson(response);
  }

  /// Uploads [tensor] for server-side reuse; returns its tensor ID.
  Future<String> uploadTensor(Tensor tensor, {String? name}) async {
    _checkDisposed();

    final body = {
      'tensor': tensor.toJson(),
      if (name != null) 'name': name,
    };

    final response = await _post('/tensors', body);
    return response['tensor_id'] as String;
  }

  /// Downloads a previously uploaded tensor by [tensorId].
  Future<Tensor> downloadTensor(String tensorId) async {
    _checkDisposed();

    final response = await _get('/tensors/$tensorId');
    return Tensor.fromJson(response);
  }

  /// Deletes a previously uploaded tensor by [tensorId].
  Future<void> deleteTensor(String tensorId) async {
    _checkDisposed();

    await _delete('/tensors/$tensorId');
  }

  /// Whether the service reports itself healthy. Never throws.
  Future<bool> healthCheck() async {
    try {
      final response = await _get('/health');
      return response['status'] == 'healthy';
    } catch (e) {
      return false;
    }
  }

  // ---- Internal HTTP helpers ------------------------------------------

  /// Submits [body] as a job, polls until it reaches a terminal state,
  /// and returns the parsed result.
  Future<JobResult<T>> _submitAndWait<T>(
    Map<String, dynamic> body,
    T Function(dynamic) resultParser,
  ) async {
    final response = await _post('/jobs', body);
    final jobId = response['job_id'] as String;

    // Plain HTTP polling is sufficient here. (The previous version also
    // constructed an unused Job handle just to dispose it — the
    // constructor only assigns fields and no WebSocket is opened until
    // its status stream is listened to, so that was dead code.)
    return _pollJob<T>(jobId, resultParser);
  }

  /// Polls `/jobs/{jobId}` every [interval] until the job is terminal.
  ///
  /// Throws [SynorException] if [timeout] elapses first.
  Future<JobResult<T>> _pollJob<T>(
    String jobId,
    T Function(dynamic) resultParser, {
    Duration interval = const Duration(milliseconds: 500),
    Duration timeout = const Duration(minutes: 5),
  }) async {
    final endTime = DateTime.now().add(timeout);

    while (DateTime.now().isBefore(endTime)) {
      final response = await _get('/jobs/$jobId');
      final result = JobResult<T>.fromJson(response, resultParser);

      if (result.status.isTerminal) {
        return result;
      }

      await Future.delayed(interval);
    }

    throw SynorException('Job polling timed out after $timeout');
  }

  /// GETs [path] (with optional [queryParams]) and decodes the JSON body.
  Future<Map<String, dynamic>> _get(
    String path, [
    Map<String, String>? queryParams,
  ]) async {
    var uri = Uri.parse('${_config.baseUrl}$path');
    if (queryParams != null && queryParams.isNotEmpty) {
      uri = uri.replace(queryParameters: queryParams);
    }

    final response = await _httpClient
        .get(uri, headers: _headers)
        .timeout(_config.timeout);

    return _handleResponse(response);
  }

  /// POSTs [body] as JSON to [path] and decodes the JSON response.
  Future<Map<String, dynamic>> _post(
    String path,
    Map<String, dynamic> body,
  ) async {
    final uri = Uri.parse('${_config.baseUrl}$path');
    final response = await _httpClient
        .post(uri, headers: _headers, body: jsonEncode(body))
        .timeout(_config.timeout);

    return _handleResponse(response);
  }

  /// DELETEs [path]; tolerates 200 and 204, raises otherwise.
  Future<void> _delete(String path) async {
    final uri = Uri.parse('${_config.baseUrl}$path');
    final response = await _httpClient
        .delete(uri, headers: _headers)
        .timeout(_config.timeout);

    if (response.statusCode != 200 && response.statusCode != 204) {
      _handleResponse(response); // throws SynorException for error codes
    }
  }

  /// Decodes a 2xx response body, or converts an error response into a
  /// thrown [SynorException], preferring the server's message/code when
  /// the error body is JSON.
  Map<String, dynamic> _handleResponse(http.Response response) {
    if (response.statusCode >= 200 && response.statusCode < 300) {
      if (response.body.isEmpty) {
        return {};
      }
      return jsonDecode(response.body) as Map<String, dynamic>;
    }

    Map<String, dynamic>? errorBody;
    try {
      errorBody = jsonDecode(response.body) as Map<String, dynamic>;
    } catch (e) {
      // Body is not JSON; fall through with a generic message.
    }

    throw SynorException(
      errorBody?['message'] as String? ?? 'Request failed',
      code: errorBody?['code'] as String?,
      statusCode: response.statusCode,
      details: errorBody,
    );
  }

  /// Disposes the client and closes the underlying HTTP client.
  ///
  /// All subsequent calls to public methods throw [StateError].
  void dispose() {
    _isDisposed = true;
    _httpClient.close();
  }
}
|
||||
394
sdk/flutter/lib/src/job.dart
Normal file
394
sdk/flutter/lib/src/job.dart
Normal file
|
|
@ -0,0 +1,394 @@
|
|||
/// Job tracking for Synor Compute SDK
|
||||
library synor_compute.job;
|
||||
|
||||
import 'dart:async';
|
||||
import 'dart:convert';
|
||||
|
||||
import 'package:web_socket_channel/web_socket_channel.dart';
|
||||
|
||||
import 'tensor.dart';
|
||||
import 'types.dart';
|
||||
|
||||
/// Result of a compute job
|
||||
/// Result of a compute job.
class JobResult<T> {
  /// Unique job identifier.
  final String jobId;

  /// Current job status.
  final JobStatus status;

  /// Result data, present only once the job has completed and a parser
  /// was supplied.
  final T? result;

  /// Error message, present only when the job failed.
  final String? error;

  /// Wall-clock execution time in milliseconds, if reported.
  final int? executionTimeMs;

  /// Cost in credits, if reported.
  final double? cost;

  /// Processor that executed the job, if reported.
  final ProcessorType? processor;

  /// Free-form metadata from execution.
  final Map<String, dynamic>? metadata;

  const JobResult({
    required this.jobId,
    required this.status,
    this.result,
    this.error,
    this.executionTimeMs,
    this.cost,
    this.processor,
    this.metadata,
  });

  /// Whether the job completed successfully and produced a result.
  bool get isSuccess => status == JobStatus.completed && result != null;

  /// Whether the job failed.
  bool get isFailed => status == JobStatus.failed;

  /// Whether the job has not yet reached a terminal state.
  bool get isRunning => !status.isTerminal;

  /// Deserializes a job result, applying [resultParser] to the raw
  /// `result` payload when both are present.
  factory JobResult.fromJson(
    Map<String, dynamic> json,
    T Function(dynamic)? resultParser,
  ) {
    final rawResult = json['result'];
    final parsed = (rawResult != null && resultParser != null)
        ? resultParser(rawResult)
        : null;
    final rawProcessor = json['processor'];

    return JobResult<T>(
      jobId: json['job_id'] as String,
      status: JobStatus.fromString(json['status'] as String),
      result: parsed,
      error: json['error'] as String?,
      executionTimeMs: json['execution_time_ms'] as int?,
      cost: (json['cost'] as num?)?.toDouble(),
      processor: rawProcessor == null
          ? null
          : ProcessorType.fromString(rawProcessor as String),
      metadata: json['metadata'] as Map<String, dynamic>?,
    );
  }

  /// Returns a copy of this result with [transform] applied to [result].
  JobResult<R> map<R>(R Function(T) transform) {
    // Copy to a local so the nullable value promotes after the check.
    final current = result;
    return JobResult<R>(
      jobId: jobId,
      status: status,
      result: current == null ? null : transform(current),
      error: error,
      executionTimeMs: executionTimeMs,
      cost: cost,
      processor: processor,
      metadata: metadata,
    );
  }

  @override
  String toString() {
    if (isSuccess) {
      return 'JobResult(id: $jobId, status: ${status.value}, '
          'time: ${executionTimeMs}ms, cost: \$${cost?.toStringAsFixed(6)})';
    }
    if (isFailed) {
      return 'JobResult(id: $jobId, status: ${status.value}, error: $error)';
    }
    return 'JobResult(id: $jobId, status: ${status.value})';
  }
}
|
||||
|
||||
/// Job status update event
|
||||
/// A single status-update event for a job, as delivered over the job's
/// WebSocket stream.
class JobStatusUpdate {
  /// Identifier of the job this update refers to.
  final String jobId;

  /// Status reported by the server.
  final JobStatus status;

  /// Optional progress value reported by the server.
  final double? progress;

  /// Optional human-readable status message.
  final String? message;

  /// Server-reported event time, or the local receive time when absent.
  final DateTime timestamp;

  const JobStatusUpdate({
    required this.jobId,
    required this.status,
    this.progress,
    this.message,
    required this.timestamp,
  });

  factory JobStatusUpdate.fromJson(Map<String, dynamic> json) {
    final rawTimestamp = json['timestamp'];
    return JobStatusUpdate(
      jobId: json['job_id'] as String,
      status: JobStatus.fromString(json['status'] as String),
      progress: (json['progress'] as num?)?.toDouble(),
      message: json['message'] as String?,
      timestamp: rawTimestamp == null
          ? DateTime.now()
          : DateTime.parse(rawTimestamp as String),
    );
  }
}
|
||||
|
||||
/// Job handle for tracking and managing a submitted job
|
||||
/// Job handle for tracking and managing a submitted job.
///
/// Status can be observed three ways: the [statusUpdates] WebSocket
/// stream, HTTP [poll]ing, or [wait], which combines both. Call
/// [dispose] when the handle is no longer needed.
class Job<T> {
  /// Server-assigned job identifier.
  final String jobId;
  final String _baseUrl;
  final Map<String, String> _headers;

  /// Optional converter from the raw JSON result payload to [T].
  final T Function(dynamic)? _resultParser;

  WebSocketChannel? _wsChannel;
  StreamController<JobStatusUpdate>? _statusController;

  // Last observed terminal result; short-circuits poll()/wait().
  JobResult<T>? _cachedResult;
  bool _isDisposed = false;

  Job({
    required this.jobId,
    required String baseUrl,
    required Map<String, String> headers,
    T Function(dynamic)? resultParser,
  })  : _baseUrl = baseUrl,
        _headers = headers,
        _resultParser = resultParser;

  /// Broadcast stream of status updates for this job.
  ///
  /// The WebSocket is opened lazily on first listen and closed when the
  /// last listener cancels or a terminal update arrives. Throws
  /// [StateError] if the job has already been disposed.
  Stream<JobStatusUpdate> get statusUpdates {
    if (_isDisposed) {
      // Previously a disposed job silently handed out a stream that
      // would never emit; fail fast instead.
      throw StateError('Job $jobId has been disposed');
    }
    _statusController ??= StreamController<JobStatusUpdate>.broadcast(
      onListen: _startWebSocket,
      onCancel: _stopWebSocket,
    );
    return _statusController!.stream;
  }

  /// Opens the status WebSocket and forwards updates to the controller.
  void _startWebSocket() {
    if (_isDisposed) return;

    // Derive the WebSocket endpoint from the HTTP base URL.
    final wsUrl = _baseUrl
        .replaceFirst('http://', 'ws://')
        .replaceFirst('https://', 'wss://');

    _wsChannel = WebSocketChannel.connect(
      Uri.parse('$wsUrl/jobs/$jobId/stream'),
    );

    _wsChannel!.stream.listen(
      (data) {
        if (_isDisposed) return;
        try {
          final json = jsonDecode(data as String) as Map<String, dynamic>;
          final update = JobStatusUpdate.fromJson(json);
          _statusController?.add(update);

          // No further updates follow a terminal state; release the socket.
          if (update.status.isTerminal) {
            _stopWebSocket();
          }
        } catch (_) {
          // Malformed frames are ignored; polling remains available.
        }
      },
      onError: (Object error) {
        if (!_isDisposed) {
          _statusController?.addError(error);
        }
      },
      onDone: _stopWebSocket,
    );
  }

  void _stopWebSocket() {
    _wsChannel?.sink.close();
    _wsChannel = null;
  }

  /// Polls the job endpoint every [interval] until a terminal state is
  /// reached, returning the final [JobResult].
  ///
  /// Intended for environments without WebSocket support. Throws
  /// [SynorException] on a non-200 response or when [timeout] elapses
  /// before the job completes.
  Future<JobResult<T>> poll({
    Duration interval = const Duration(milliseconds: 500),
    Duration timeout = const Duration(minutes: 5),
  }) async {
    if (_cachedResult?.status.isTerminal == true) {
      return _cachedResult!;
    }

    final endTime = DateTime.now().add(timeout);
    final client = _createHttpClient();

    try {
      while (DateTime.now().isBefore(endTime)) {
        final response = await client.get(
          Uri.parse('$_baseUrl/jobs/$jobId'),
          headers: _headers,
        );

        if (response.statusCode != 200) {
          throw SynorException(
            'Failed to poll job status',
            statusCode: response.statusCode,
          );
        }

        final json = jsonDecode(response.body) as Map<String, dynamic>;
        final result = JobResult<T>.fromJson(json, _resultParser);

        if (result.status.isTerminal) {
          _cachedResult = result;
          return result;
        }

        await Future.delayed(interval);
      }

      throw SynorException('Job polling timed out after $timeout');
    } finally {
      client.close();
    }
  }

  /// Waits for job completion with automatic strategy selection.
  ///
  /// When [useWebSocket] is true, listens on [statusUpdates] and fetches
  /// the full result once a terminal update arrives; if the WebSocket
  /// transport fails, falls back to [poll]. Throws [SynorException] when
  /// [timeout] elapses.
  Future<JobResult<T>> wait({
    Duration timeout = const Duration(minutes: 5),
    bool useWebSocket = true,
  }) async {
    if (_cachedResult?.status.isTerminal == true) {
      return _cachedResult!;
    }
    if (!useWebSocket) {
      return poll(timeout: timeout);
    }

    final completer = Completer<JobResult<T>>();
    StreamSubscription<JobStatusUpdate>? subscription;
    try {
      subscription = statusUpdates.listen(
        (update) async {
          if (update.status.isTerminal && !completer.isCompleted) {
            try {
              // The stream only carries status; fetch the full result.
              final result = await poll(
                interval: Duration.zero,
                timeout: const Duration(seconds: 10),
              );
              if (!completer.isCompleted) completer.complete(result);
            } catch (e, st) {
              // BUGFIX: errors thrown inside this async callback used to
              // escape into the zone unhandled; route them to the waiter.
              if (!completer.isCompleted) completer.completeError(e, st);
            }
          }
        },
        onError: (Object error) {
          if (!completer.isCompleted) {
            completer.completeError(error);
          }
        },
      );

      return await completer.future.timeout(
        timeout,
        // BUGFIX: the timeout exception used to be caught by the
        // WebSocket-failure fallback below, triggering a second
        // full-length poll (total wait up to 2x the requested timeout).
        onTimeout: () =>
            throw SynorException('Job wait timed out after $timeout'),
      );
    } on SynorException {
      rethrow; // API errors and timeouts are not transport failures.
    } catch (_) {
      // WebSocket transport failed; fall back to plain polling.
      return poll(timeout: timeout);
    } finally {
      // BUGFIX: subscription was previously leaked on the error path.
      await subscription?.cancel();
    }
  }

  /// Requests cancellation; returns true if the server accepted it.
  Future<bool> cancel() async {
    final client = _createHttpClient();
    try {
      final response = await client.post(
        Uri.parse('$_baseUrl/jobs/$jobId/cancel'),
        headers: _headers,
      );

      if (response.statusCode == 200) {
        _cachedResult = JobResult<T>(
          jobId: jobId,
          status: JobStatus.cancelled,
        );
        return true;
      }
      return false;
    } finally {
      client.close();
    }
  }

  /// Fetches the current job status.
  ///
  /// Throws [SynorException] on a non-200 response.
  Future<JobStatus> getStatus() async {
    final client = _createHttpClient();
    try {
      final response = await client.get(
        Uri.parse('$_baseUrl/jobs/$jobId/status'),
        headers: _headers,
      );

      if (response.statusCode != 200) {
        throw SynorException(
          'Failed to get job status',
          statusCode: response.statusCode,
        );
      }

      final json = jsonDecode(response.body) as Map<String, dynamic>;
      return JobStatus.fromString(json['status'] as String);
    } finally {
      client.close();
    }
  }

  /// Releases the WebSocket and stream resources. Idempotent.
  void dispose() {
    _isDisposed = true;
    _stopWebSocket();
    _statusController?.close();
    _statusController = null;
  }

  // Creates an HTTP client - in a real app, use http package.
  // NOTE(review): placeholder; poll/cancel/getStatus are unusable until
  // a real client is injected here — confirm the intended injection path.
  dynamic _createHttpClient() {
    // This is a placeholder - actual implementation uses http package
    throw UnimplementedError('HTTP client should be injected');
  }
}
|
||||
|
||||
/// Batch job operations
|
||||
/// Batch job operations.
///
/// Thin aggregate over a list of [Job] handles for waiting on,
/// cancelling, and disposing many jobs at once.
class JobBatch<T> {
  final List<Job<T>> jobs;

  JobBatch(this.jobs);

  /// Waits for every job in the batch to reach a terminal state.
  Future<List<JobResult<T>>> waitAll({
    Duration timeout = const Duration(minutes: 10),
  }) async {
    final pending = [for (final job in jobs) job.wait(timeout: timeout)];
    return Future.wait(pending);
  }

  /// Completes with the first job in the batch to reach a terminal state.
  Future<JobResult<T>> waitAny({
    Duration timeout = const Duration(minutes: 5),
  }) async {
    final pending = [for (final job in jobs) job.wait(timeout: timeout)];
    return Future.any(pending);
  }

  /// Requests cancellation of every job in the batch.
  Future<void> cancelAll() async {
    await Future.wait([for (final job in jobs) job.cancel()]);
  }

  /// Releases the resources held by every job handle.
  void dispose() {
    for (final job in jobs) {
      job.dispose();
    }
  }
}
|
||||
520
sdk/flutter/lib/src/tensor.dart
Normal file
520
sdk/flutter/lib/src/tensor.dart
Normal file
|
|
@ -0,0 +1,520 @@
|
|||
/// Tensor implementation for Synor Compute SDK
|
||||
library synor_compute.tensor;
|
||||
|
||||
import 'dart:convert';
|
||||
import 'dart:math' as math;
|
||||
import 'dart:typed_data';
|
||||
|
||||
import 'types.dart';
|
||||
|
||||
/// Multi-dimensional tensor for compute operations
|
||||
/// Multi-dimensional tensor for compute operations.
///
/// Elements are stored flat in row-major order as float64; [dtype] only
/// records the precision requested for server-side computation.
class Tensor {
  /// Tensor shape (dimensions).
  final List<int> shape;

  /// Underlying data as Float64List (flat, row-major).
  final Float64List data;

  /// Data type.
  final DType dtype;

  /// Unique identifier (assigned by server).
  final String? id;

  /// Creates a tensor with given shape and data.
  ///
  /// The [data] list is copied. Throws [ArgumentError] if its length
  /// does not equal the product of [shape].
  Tensor({
    required this.shape,
    required List<double> data,
    this.dtype = DType.float64,
    this.id,
  }) : data = Float64List.fromList(data) {
    final expectedSize = shape.fold<int>(1, (a, b) => a * b);
    if (this.data.length != expectedSize) {
      throw ArgumentError(
        'Data length ${this.data.length} does not match shape $shape '
        '(expected $expectedSize elements)',
      );
    }
  }

  /// Creates a tensor from an existing [Float64List] without copying.
  Tensor.fromTypedData({
    required this.shape,
    required this.data,
    this.dtype = DType.float64,
    this.id,
  }) {
    final expectedSize = shape.fold<int>(1, (a, b) => a * b);
    if (data.length != expectedSize) {
      throw ArgumentError(
        'Data length ${data.length} does not match shape $shape '
        '(expected $expectedSize elements)',
      );
    }
  }

  /// Creates a tensor filled with zeros.
  factory Tensor.zeros(List<int> shape, {DType dtype = DType.float64}) {
    final size = shape.fold<int>(1, (a, b) => a * b);
    return Tensor(
      shape: shape,
      data: List.filled(size, 0.0),
      dtype: dtype,
    );
  }

  /// Creates a tensor filled with ones.
  factory Tensor.ones(List<int> shape, {DType dtype = DType.float64}) {
    final size = shape.fold<int>(1, (a, b) => a * b);
    return Tensor(
      shape: shape,
      data: List.filled(size, 1.0),
      dtype: dtype,
    );
  }

  /// Creates a tensor filled with a specific value.
  factory Tensor.full(
    List<int> shape,
    double value, {
    DType dtype = DType.float64,
  }) {
    final size = shape.fold<int>(1, (a, b) => a * b);
    return Tensor(
      shape: shape,
      data: List.filled(size, value),
      dtype: dtype,
    );
  }

  /// Creates a tensor with random values from uniform distribution [0, 1).
  factory Tensor.rand(List<int> shape, {DType dtype = DType.float64}) {
    final size = shape.fold<int>(1, (a, b) => a * b);
    final random = math.Random();
    return Tensor(
      shape: shape,
      data: List.generate(size, (_) => random.nextDouble()),
      dtype: dtype,
    );
  }

  /// Creates a tensor with random values from a normal distribution
  /// with the given [mean] and standard deviation [std].
  factory Tensor.randn(
    List<int> shape, {
    double mean = 0.0,
    double std = 1.0,
    DType dtype = DType.float64,
  }) {
    final size = shape.fold<int>(1, (a, b) => a * b);
    final random = math.Random();

    // Box-Muller transform for normal distribution.
    double nextGaussian() {
      // BUGFIX: use 1 - nextDouble() so u1 lies in (0, 1]; nextDouble()
      // may return exactly 0, making log(u1) diverge to -infinity.
      final u1 = 1.0 - random.nextDouble();
      final u2 = random.nextDouble();
      return math.sqrt(-2 * math.log(u1)) * math.cos(2 * math.pi * u2);
    }

    return Tensor(
      shape: shape,
      data: List.generate(size, (_) => mean + std * nextGaussian()),
      dtype: dtype,
    );
  }

  /// Creates an n-by-n identity matrix.
  factory Tensor.eye(int n, {DType dtype = DType.float64}) {
    final data = List.filled(n * n, 0.0);
    for (var i = 0; i < n; i++) {
      data[i * n + i] = 1.0;
    }
    return Tensor(shape: [n, n], data: data, dtype: dtype);
  }

  /// Creates a 1-D tensor of [steps] evenly spaced values from [start]
  /// to [end] inclusive.
  ///
  /// Throws [ArgumentError] if [steps] is less than 2.
  factory Tensor.linspace(
    double start,
    double end,
    int steps, {
    DType dtype = DType.float64,
  }) {
    if (steps < 2) {
      throw ArgumentError('Steps must be at least 2');
    }
    final step = (end - start) / (steps - 1);
    return Tensor(
      shape: [steps],
      data: List.generate(steps, (i) => start + i * step),
      dtype: dtype,
    );
  }

  /// Creates a 1-D tensor of values in [start, end) advancing by [step].
  ///
  /// Negative steps count downwards (for start > end). Throws
  /// [ArgumentError] for a zero step, which would otherwise loop forever.
  factory Tensor.arange(
    double start,
    double end, {
    double step = 1.0,
    DType dtype = DType.float64,
  }) {
    if (step == 0) {
      throw ArgumentError('Step must be non-zero');
    }
    final data = <double>[];
    if (step > 0) {
      for (var v = start; v < end; v += step) {
        data.add(v);
      }
    } else {
      for (var v = start; v > end; v += step) {
        data.add(v);
      }
    }
    return Tensor(shape: [data.length], data: data, dtype: dtype);
  }

  /// Creates a tensor from JSON.
  ///
  /// Accepts either a base64-encoded float64 byte string or a (possibly
  /// nested) numeric list under `data`.
  factory Tensor.fromJson(Map<String, dynamic> json) {
    final shape = (json['shape'] as List).cast<int>();
    final rawData = json['data'];

    List<double> data;
    if (rawData is String) {
      // Base64-encoded binary data.
      final bytes = base64Decode(rawData);
      if (bytes.length % 8 != 0) {
        throw ArgumentError(
          'Binary tensor data length must be a multiple of 8',
        );
      }
      // BUGFIX: view via offset/length instead of the raw buffer, which
      // may extend beyond the decoded byte range.
      data = bytes.buffer
          .asFloat64List(bytes.offsetInBytes, bytes.length ~/ 8)
          .toList();
    } else if (rawData is List) {
      data = _flattenList(rawData);
    } else {
      throw ArgumentError('Invalid tensor data format');
    }

    return Tensor(
      shape: shape,
      data: data,
      dtype: DType.fromString(json['dtype'] as String? ?? 'float64'),
      id: json['id'] as String?,
    );
  }

  /// Flattens a nested list to 1D, coercing every number to double.
  static List<double> _flattenList(List<dynamic> nested) {
    final result = <double>[];
    void flatten(dynamic item) {
      if (item is List) {
        for (final e in item) {
          flatten(e);
        }
      } else if (item is num) {
        result.add(item.toDouble());
      }
    }

    flatten(nested);
    return result;
  }

  /// Number of dimensions.
  int get ndim => shape.length;

  /// Total number of elements.
  int get size => data.length;

  /// Number of bytes.
  int get nbytes => data.lengthInBytes;

  /// Get element at index (for 1D tensors only).
  double operator [](int index) {
    if (ndim != 1) {
      throw StateError('Use at() for multi-dimensional indexing');
    }
    return data[index];
  }

  /// Get element at multi-dimensional index (row-major).
  ///
  /// Throws [ArgumentError] on a wrong index count and [RangeError]
  /// when any index is out of bounds.
  double at(List<int> indices) {
    if (indices.length != ndim) {
      throw ArgumentError(
        'Expected $ndim indices, got ${indices.length}',
      );
    }
    var flatIndex = 0;
    var stride = 1;
    // Accumulate the flat offset from the innermost axis outwards.
    for (var i = ndim - 1; i >= 0; i--) {
      if (indices[i] < 0 || indices[i] >= shape[i]) {
        throw RangeError('Index ${indices[i]} out of bounds for axis $i '
            'with size ${shape[i]}');
      }
      flatIndex += indices[i] * stride;
      stride *= shape[i];
    }
    return data[flatIndex];
  }

  /// Reshape tensor to a new shape with the same number of elements.
  ///
  /// Shares the underlying data; no copy is made.
  Tensor reshape(List<int> newShape) {
    final newSize = newShape.fold<int>(1, (a, b) => a * b);
    if (newSize != size) {
      throw ArgumentError(
        'Cannot reshape tensor of size $size to shape $newShape '
        '(size $newSize)',
      );
    }
    return Tensor.fromTypedData(
      shape: newShape,
      data: data,
      dtype: dtype,
      id: id,
    );
  }

  /// Flatten tensor to 1D.
  Tensor flatten() => reshape([size]);

  /// Transpose tensor (swap last two dimensions).
  ///
  /// Returns this tensor unchanged when it has fewer than 2 dimensions.
  Tensor transpose() {
    if (ndim < 2) {
      return this;
    }

    final newShape = List<int>.from(shape);
    final tmp = newShape[ndim - 1];
    newShape[ndim - 1] = newShape[ndim - 2];
    newShape[ndim - 2] = tmp;

    final newData = Float64List(size);
    final rows = shape[ndim - 2];
    final cols = shape[ndim - 1];
    // Leading dimensions are treated as independent batches.
    final batchSize = size ~/ (rows * cols);

    for (var b = 0; b < batchSize; b++) {
      final offset = b * rows * cols;
      for (var i = 0; i < rows; i++) {
        for (var j = 0; j < cols; j++) {
          newData[offset + j * rows + i] = data[offset + i * cols + j];
        }
      }
    }

    return Tensor.fromTypedData(
      shape: newShape,
      data: newData,
      dtype: dtype,
    );
  }

  /// Sum of all elements.
  double sum() => data.fold(0.0, (a, b) => a + b);

  /// Mean of all elements.
  double mean() => sum() / size;

  /// Population standard deviation of all elements (divides by N).
  double std() {
    final m = mean();
    final variance = data.fold(0.0, (sum, x) => sum + (x - m) * (x - m)) / size;
    return math.sqrt(variance);
  }

  /// Minimum value.
  double min() => data.reduce(math.min);

  /// Maximum value.
  double max() => data.reduce(math.max);

  /// Flat index of the minimum value (first occurrence).
  int argmin() {
    var minIdx = 0;
    var minVal = data[0];
    for (var i = 1; i < size; i++) {
      if (data[i] < minVal) {
        minVal = data[i];
        minIdx = i;
      }
    }
    return minIdx;
  }

  /// Flat index of the maximum value (first occurrence).
  int argmax() {
    var maxIdx = 0;
    var maxVal = data[0];
    for (var i = 1; i < size; i++) {
      if (data[i] > maxVal) {
        maxVal = data[i];
        maxIdx = i;
      }
    }
    return maxIdx;
  }

  /// Element-wise addition; shapes must match exactly.
  Tensor add(Tensor other) {
    _checkShapesMatch(other);
    final result = Float64List(size);
    for (var i = 0; i < size; i++) {
      result[i] = data[i] + other.data[i];
    }
    return Tensor.fromTypedData(shape: shape, data: result, dtype: dtype);
  }

  /// Element-wise subtraction; shapes must match exactly.
  Tensor sub(Tensor other) {
    _checkShapesMatch(other);
    final result = Float64List(size);
    for (var i = 0; i < size; i++) {
      result[i] = data[i] - other.data[i];
    }
    return Tensor.fromTypedData(shape: shape, data: result, dtype: dtype);
  }

  /// Element-wise multiplication; shapes must match exactly.
  Tensor mul(Tensor other) {
    _checkShapesMatch(other);
    final result = Float64List(size);
    for (var i = 0; i < size; i++) {
      result[i] = data[i] * other.data[i];
    }
    return Tensor.fromTypedData(shape: shape, data: result, dtype: dtype);
  }

  /// Element-wise division; shapes must match exactly.
  Tensor div(Tensor other) {
    _checkShapesMatch(other);
    final result = Float64List(size);
    for (var i = 0; i < size; i++) {
      result[i] = data[i] / other.data[i];
    }
    return Tensor.fromTypedData(shape: shape, data: result, dtype: dtype);
  }

  /// Adds [scalar] to every element.
  Tensor addScalar(double scalar) {
    final result = Float64List(size);
    for (var i = 0; i < size; i++) {
      result[i] = data[i] + scalar;
    }
    return Tensor.fromTypedData(shape: shape, data: result, dtype: dtype);
  }

  /// Multiplies every element by [scalar].
  Tensor mulScalar(double scalar) {
    final result = Float64List(size);
    for (var i = 0; i < size; i++) {
      result[i] = data[i] * scalar;
    }
    return Tensor.fromTypedData(shape: shape, data: result, dtype: dtype);
  }

  /// Applies [fn] element-wise, returning a new tensor.
  Tensor map(double Function(double) fn) {
    final result = Float64List(size);
    for (var i = 0; i < size; i++) {
      result[i] = fn(data[i]);
    }
    return Tensor.fromTypedData(shape: shape, data: result, dtype: dtype);
  }

  /// ReLU activation: max(x, 0).
  Tensor relu() => map((x) => x > 0 ? x : 0.0);

  /// Sigmoid activation: 1 / (1 + e^-x).
  Tensor sigmoid() => map((x) => 1.0 / (1.0 + math.exp(-x)));

  /// Tanh activation.
  Tensor tanh() => map(math.tanh);

  /// Softmax over the whole tensor (1D) or each row (2D).
  ///
  /// Subtracts the running maximum before exponentiating for numeric
  /// stability. Throws [UnsupportedError] for higher ranks.
  Tensor softmax() {
    if (ndim == 1) {
      final maxVal = max();
      final expData = data.map((x) => math.exp(x - maxVal)).toList();
      final sumExp = expData.fold(0.0, (a, b) => a + b);
      return Tensor(
        shape: shape,
        data: expData.map((x) => x / sumExp).toList(),
        dtype: dtype,
      );
    } else if (ndim == 2) {
      final rows = shape[0];
      final cols = shape[1];
      final result = Float64List(size);

      for (var i = 0; i < rows; i++) {
        var maxVal = double.negativeInfinity;
        for (var j = 0; j < cols; j++) {
          final v = data[i * cols + j];
          if (v > maxVal) maxVal = v;
        }

        var sumExp = 0.0;
        for (var j = 0; j < cols; j++) {
          final exp = math.exp(data[i * cols + j] - maxVal);
          result[i * cols + j] = exp;
          sumExp += exp;
        }

        for (var j = 0; j < cols; j++) {
          result[i * cols + j] /= sumExp;
        }
      }

      return Tensor.fromTypedData(shape: shape, data: result, dtype: dtype);
    }
    throw UnsupportedError('Softmax only supported for 1D and 2D tensors');
  }

  /// Throws [ArgumentError] unless [other] has the exact same shape.
  void _checkShapesMatch(Tensor other) {
    if (shape.length != other.shape.length) {
      throw ArgumentError('Shape mismatch: $shape vs ${other.shape}');
    }
    for (var i = 0; i < shape.length; i++) {
      if (shape[i] != other.shape[i]) {
        throw ArgumentError('Shape mismatch: $shape vs ${other.shape}');
      }
    }
  }

  /// Convert to JSON for API serialization; data is base64-encoded.
  Map<String, dynamic> toJson() => {
        'shape': shape,
        // BUGFIX: bound the byte view by offset/length so views over a
        // larger buffer serialize only this tensor's elements.
        'data': base64Encode(
          data.buffer.asUint8List(data.offsetInBytes, data.lengthInBytes),
        ),
        'dtype': dtype.value,
        if (id != null) 'id': id,
      };

  /// Convert to nested list representation matching [shape].
  List<dynamic> toNestedList() {
    if (ndim == 1) {
      return data.toList();
    }

    List<dynamic> buildNested(int dim, int offset) {
      if (dim == ndim - 1) {
        return data.sublist(offset, offset + shape[dim]).toList();
      }

      final stride =
          shape.sublist(dim + 1).fold<int>(1, (a, b) => a * b);
      return List.generate(
        shape[dim],
        (i) => buildNested(dim + 1, offset + i * stride),
      );
    }

    return buildNested(0, 0);
  }

  @override
  String toString() {
    if (size <= 20) {
      return 'Tensor(shape: $shape, data: ${toNestedList()})';
    }
    return 'Tensor(shape: $shape, dtype: ${dtype.value})';
  }

  @override
  bool operator ==(Object other) {
    if (identical(this, other)) return true;
    if (other is! Tensor) return false;
    if (shape.length != other.shape.length) return false;
    for (var i = 0; i < shape.length; i++) {
      if (shape[i] != other.shape[i]) return false;
    }
    for (var i = 0; i < size; i++) {
      if (data[i] != other.data[i]) return false;
    }
    return true;
  }

  @override
  // BUGFIX: hash element-wise so tensors that compare equal share a hash
  // code; List.hashCode is identity-based and broke Set/Map membership.
  int get hashCode => Object.hash(Object.hashAll(shape), Object.hashAll(data));
}
|
||||
371
sdk/flutter/lib/src/types.dart
Normal file
371
sdk/flutter/lib/src/types.dart
Normal file
|
|
@ -0,0 +1,371 @@
|
|||
/// Type definitions for Synor Compute SDK
|
||||
library synor_compute.types;
|
||||
|
||||
import 'package:collection/collection.dart';
|
||||
|
||||
/// Numeric precision for compute operations
|
||||
/// Numeric precision for compute operations.
enum Precision {
  fp64('fp64'),
  fp32('fp32'),
  fp16('fp16'),
  bf16('bf16'),
  int8('int8'),
  int4('int4');

  const Precision(this.value);

  /// Wire-format string for this precision.
  final String value;

  /// Parses [s]; unrecognized values fall back to [fp32].
  static Precision fromString(String s) {
    for (final precision in values) {
      if (precision.value == s) return precision;
    }
    return fp32;
  }
}
|
||||
|
||||
/// Target processor type for compute operations
|
||||
/// Target processor type for compute operations.
enum ProcessorType {
  cpu('cpu'),
  gpu('gpu'),
  tpu('tpu'),
  npu('npu'),
  lpu('lpu'),
  fpga('fpga'),
  dsp('dsp'),
  webgpu('webgpu'),
  wasm('wasm'),
  auto('auto');

  const ProcessorType(this.value);

  /// Wire-format string for this processor type.
  final String value;

  /// Parses [s]; unrecognized values fall back to [auto].
  static ProcessorType fromString(String s) {
    for (final processor in values) {
      if (processor.value == s) return processor;
    }
    return auto;
  }
}
|
||||
|
||||
/// Job priority levels
|
||||
/// Job priority levels.
enum Priority {
  low('low'),
  normal('normal'),
  high('high'),
  critical('critical');

  const Priority(this.value);

  /// Wire-format string for this priority.
  final String value;

  /// Parses [s]; unrecognized values fall back to [normal].
  static Priority fromString(String s) {
    for (final priority in values) {
      if (priority.value == s) return priority;
    }
    return normal;
  }
}
|
||||
|
||||
/// Job execution status
|
||||
/// Job execution status.
enum JobStatus {
  pending('pending'),
  queued('queued'),
  running('running'),
  completed('completed'),
  failed('failed'),
  cancelled('cancelled');

  const JobStatus(this.value);

  /// Wire-format string for this status.
  final String value;

  /// Whether the job has finished and will not change state again.
  bool get isTerminal =>
      this == completed || this == failed || this == cancelled;

  /// Parses [s]; unrecognized values fall back to [pending].
  static JobStatus fromString(String s) {
    for (final status in values) {
      if (status.value == s) return status;
    }
    return pending;
  }
}
|
||||
|
||||
/// Balancing strategy for load distribution
|
||||
/// Balancing strategy for load distribution.
enum BalancingStrategy {
  speed('speed'),
  energy('energy'),
  balanced('balanced'),
  cost('cost'),
  latency('latency');

  const BalancingStrategy(this.value);

  /// Wire-format string for this strategy.
  final String value;

  /// Parses [s]; unrecognized values fall back to [balanced].
  static BalancingStrategy fromString(String s) {
    for (final strategy in values) {
      if (strategy.value == s) return strategy;
    }
    return balanced;
  }
}
|
||||
|
||||
/// Tensor data type
|
||||
/// Tensor data type.
enum DType {
  float64('float64'),
  float32('float32'),
  float16('float16'),
  bfloat16('bfloat16'),
  int64('int64'),
  int32('int32'),
  int16('int16'),
  int8('int8'),
  uint8('uint8'),
  bool_('bool');

  const DType(this.value);

  /// Wire-format string for this data type.
  final String value;

  /// Parses [s]; unrecognized values fall back to [float32].
  static DType fromString(String s) {
    for (final dtype in values) {
      if (dtype.value == s) return dtype;
    }
    return float32;
  }
}
|
||||
|
||||
/// Configuration for SDK client
|
||||
/// Configuration for SDK client.
class SynorConfig {
  /// API key used to authenticate requests.
  final String apiKey;

  /// Base URL of the Synor Compute API.
  final String baseUrl;

  /// Per-request timeout.
  final Duration timeout;

  /// Maximum number of retries for failed requests.
  final int maxRetries;

  /// Processor used when a call does not specify one.
  final ProcessorType defaultProcessor;

  /// Precision used when a call does not specify one.
  final Precision defaultPrecision;

  /// Priority used when a call does not specify one.
  final Priority defaultPriority;

  const SynorConfig({
    required this.apiKey,
    this.baseUrl = 'https://compute.synor.io',
    this.timeout = const Duration(seconds: 30),
    this.maxRetries = 3,
    this.defaultProcessor = ProcessorType.auto,
    this.defaultPrecision = Precision.fp32,
    this.defaultPriority = Priority.normal,
  });

  /// Returns a copy of this configuration with the given fields replaced.
  SynorConfig copyWith({
    String? apiKey,
    String? baseUrl,
    Duration? timeout,
    int? maxRetries,
    ProcessorType? defaultProcessor,
    Precision? defaultPrecision,
    Priority? defaultPriority,
  }) {
    return SynorConfig(
      apiKey: apiKey ?? this.apiKey,
      baseUrl: baseUrl ?? this.baseUrl,
      timeout: timeout ?? this.timeout,
      maxRetries: maxRetries ?? this.maxRetries,
      defaultProcessor: defaultProcessor ?? this.defaultProcessor,
      defaultPrecision: defaultPrecision ?? this.defaultPrecision,
      defaultPriority: defaultPriority ?? this.defaultPriority,
    );
  }
}
|
||||
|
||||
/// Matrix multiplication options
|
||||
/// Matrix multiplication options.
class MatMulOptions {
  /// Precision override; omitted from the request when null.
  final Precision? precision;

  /// Processor override; omitted from the request when null.
  final ProcessorType? processor;

  /// Priority override; omitted from the request when null.
  final Priority? priority;

  /// Whether to transpose the left operand before multiplying.
  final bool transposeA;

  /// Whether to transpose the right operand before multiplying.
  final bool transposeB;

  const MatMulOptions({
    this.precision,
    this.processor,
    this.priority,
    this.transposeA = false,
    this.transposeB = false,
  });

  /// Serializes to the API's snake_case JSON; null overrides are omitted.
  Map<String, dynamic> toJson() => {
        if (precision != null) 'precision': precision!.value,
        if (processor != null) 'processor': processor!.value,
        if (priority != null) 'priority': priority!.value,
        'transpose_a': transposeA,
        'transpose_b': transposeB,
      };
}
|
||||
|
||||
/// Convolution options
|
||||
/// Convolution options.
class Conv2dOptions {
  /// Kernel size — two entries, presumably [height, width]; confirm with API.
  final List<int> kernel;

  /// Stride per spatial dimension.
  final List<int> stride;

  /// Padding per spatial dimension.
  final List<int> padding;

  /// Dilation per spatial dimension.
  final List<int> dilation;

  /// Number of convolution groups.
  final int groups;

  /// Precision override; omitted from the request when null.
  final Precision? precision;

  /// Processor override; omitted from the request when null.
  final ProcessorType? processor;

  /// Priority override; omitted from the request when null.
  final Priority? priority;

  const Conv2dOptions({
    this.kernel = const [3, 3],
    this.stride = const [1, 1],
    this.padding = const [0, 0],
    this.dilation = const [1, 1],
    this.groups = 1,
    this.precision,
    this.processor,
    this.priority,
  });

  /// Serializes to the API's snake_case JSON; null overrides are omitted.
  Map<String, dynamic> toJson() => {
        'kernel': kernel,
        'stride': stride,
        'padding': padding,
        'dilation': dilation,
        'groups': groups,
        if (precision != null) 'precision': precision!.value,
        if (processor != null) 'processor': processor!.value,
        if (priority != null) 'priority': priority!.value,
      };
}
|
||||
|
||||
/// Flash attention options
|
||||
/// Flash attention options.
class AttentionOptions {
  /// Number of attention heads.
  final int numHeads;

  /// Attention scale factor; omitted from the request when null
  /// (server-side default applies — confirm its value with the API).
  final double? scale;

  /// Whether to apply a causal (autoregressive) mask.
  final bool causal;

  /// Dropout probability; omitted from the request when null.
  final double? dropoutP;

  /// Precision override; omitted from the request when null.
  final Precision? precision;

  /// Processor override; omitted from the request when null.
  final ProcessorType? processor;

  /// Priority override; omitted from the request when null.
  final Priority? priority;

  const AttentionOptions({
    required this.numHeads,
    this.scale,
    this.causal = false,
    this.dropoutP,
    this.precision,
    this.processor,
    this.priority,
  });

  /// Serializes to the API's snake_case JSON; null fields are omitted.
  Map<String, dynamic> toJson() => {
        'num_heads': numHeads,
        if (scale != null) 'scale': scale,
        'causal': causal,
        if (dropoutP != null) 'dropout_p': dropoutP,
        if (precision != null) 'precision': precision!.value,
        if (processor != null) 'processor': processor!.value,
        if (priority != null) 'priority': priority!.value,
      };
}
|
||||
|
||||
/// LLM inference options
|
||||
/// LLM inference options.
class InferenceOptions {
  /// Maximum number of tokens to generate.
  final int maxTokens;

  /// Sampling temperature.
  final double temperature;

  /// Nucleus (top-p) sampling threshold.
  final double topP;

  /// Top-k sampling cutoff; omitted from the request when null.
  final int? topK;

  /// Frequency penalty; omitted from the request when null.
  final double? frequencyPenalty;

  /// Presence penalty; omitted from the request when null.
  final double? presencePenalty;

  /// Sequences that stop generation; omitted from the request when null.
  final List<String>? stopSequences;

  /// Whether the response should be streamed.
  final bool stream;

  /// Processor override; omitted from the request when null.
  final ProcessorType? processor;

  /// Priority override; omitted from the request when null.
  final Priority? priority;

  const InferenceOptions({
    this.maxTokens = 256,
    this.temperature = 0.7,
    this.topP = 1.0,
    this.topK,
    this.frequencyPenalty,
    this.presencePenalty,
    this.stopSequences,
    this.stream = false,
    this.processor,
    this.priority,
  });

  /// Serializes to the API's snake_case JSON; null fields are omitted.
  Map<String, dynamic> toJson() => {
        'max_tokens': maxTokens,
        'temperature': temperature,
        'top_p': topP,
        if (topK != null) 'top_k': topK,
        if (frequencyPenalty != null) 'frequency_penalty': frequencyPenalty,
        if (presencePenalty != null) 'presence_penalty': presencePenalty,
        if (stopSequences != null) 'stop_sequences': stopSequences,
        'stream': stream,
        if (processor != null) 'processor': processor!.value,
        if (priority != null) 'priority': priority!.value,
      };
}
|
||||
|
||||
/// Pricing information for compute resources
|
||||
/// Pricing information for compute resources.
class PricingInfo {
  /// Processor type this pricing entry describes.
  final ProcessorType processor;

  /// Price per second of compute time.
  final double pricePerSecond;

  /// Price per GFLOP of work.
  final double pricePerGflop;

  /// Number of currently available compute units.
  final int availableUnits;

  /// Current utilization as a percentage.
  final double utilizationPercent;

  /// Region identifier this pricing applies to.
  final String region;

  const PricingInfo({
    required this.processor,
    required this.pricePerSecond,
    required this.pricePerGflop,
    required this.availableUnits,
    required this.utilizationPercent,
    required this.region,
  });

  /// Deserializes from the API's snake_case JSON.
  ///
  /// All fields are required; throws a [TypeError] if any is missing
  /// or has the wrong type.
  factory PricingInfo.fromJson(Map<String, dynamic> json) => PricingInfo(
        processor: ProcessorType.fromString(json['processor'] as String),
        pricePerSecond: (json['price_per_second'] as num).toDouble(),
        pricePerGflop: (json['price_per_gflop'] as num).toDouble(),
        availableUnits: json['available_units'] as int,
        utilizationPercent: (json['utilization_percent'] as num).toDouble(),
        region: json['region'] as String,
      );
}
|
||||
|
||||
/// Compute usage statistics
|
||||
/// Compute usage statistics.
class UsageStats {
  /// Total number of jobs submitted.
  final int totalJobs;

  /// Number of jobs that completed successfully.
  final int completedJobs;

  /// Number of jobs that failed.
  final int failedJobs;

  /// Total compute time consumed, in seconds.
  final double totalComputeSeconds;

  /// Total cost across all processors.
  final double totalCost;

  /// Cost broken down per processor type.
  final Map<ProcessorType, double> costByProcessor;

  const UsageStats({
    required this.totalJobs,
    required this.completedJobs,
    required this.failedJobs,
    required this.totalComputeSeconds,
    required this.totalCost,
    required this.costByProcessor,
  });

  /// Deserializes from the API's snake_case JSON.
  ///
  /// `cost_by_processor` is optional and defaults to an empty map.
  factory UsageStats.fromJson(Map<String, dynamic> json) {
    final costMap = <ProcessorType, double>{};
    final rawCostMap = json['cost_by_processor'] as Map<String, dynamic>?;
    if (rawCostMap != null) {
      for (final entry in rawCostMap.entries) {
        costMap[ProcessorType.fromString(entry.key)] =
            (entry.value as num).toDouble();
      }
    }

    return UsageStats(
      totalJobs: json['total_jobs'] as int,
      completedJobs: json['completed_jobs'] as int,
      failedJobs: json['failed_jobs'] as int,
      totalComputeSeconds: (json['total_compute_seconds'] as num).toDouble(),
      totalCost: (json['total_cost'] as num).toDouble(),
      costByProcessor: costMap,
    );
  }
}
|
||||
|
||||
/// Exception thrown by Synor Compute operations
|
||||
/// Exception thrown by Synor Compute operations.
class SynorException implements Exception {
  /// Human-readable description of the failure.
  final String message;

  /// Machine-readable error code from the API, if any.
  final String? code;

  /// HTTP status code associated with the failure, if any.
  final int? statusCode;

  /// Additional structured error details from the API, if any.
  final Map<String, dynamic>? details;

  const SynorException(
    this.message, {
    this.code,
    this.statusCode,
    this.details,
  });

  /// Builds an exception from an API error payload.
  ///
  /// A missing `message` falls back to 'Unknown error'.
  factory SynorException.fromJson(Map<String, dynamic> json) => SynorException(
        json['message'] as String? ?? 'Unknown error',
        code: json['code'] as String?,
        statusCode: json['status_code'] as int?,
        details: json['details'] as Map<String, dynamic>?,
      );

  @override
  String toString() =>
      // BUGFIX: previously printed '(code: null)' for code-less errors.
      code == null
          ? 'SynorException: $message'
          : 'SynorException: $message (code: $code)';
}
|
||||
92
sdk/flutter/lib/synor_compute.dart
Normal file
92
sdk/flutter/lib/synor_compute.dart
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
/// Synor Compute SDK for Flutter/Dart
///
/// A high-performance SDK for distributed heterogeneous computing.
/// Supports CPU, GPU, TPU, NPU, LPU, FPGA, DSP, WebGPU, and WASM processors.
///
/// ## Quick Start
///
/// ```dart
/// import 'package:synor_compute/synor_compute.dart';
///
/// void main() async {
///   // Create client
///   final client = SynorCompute(apiKey: 'your-api-key');
///
///   // Matrix multiplication
///   final a = Tensor.rand([512, 512]);
///   final b = Tensor.rand([512, 512]);
///   final result = await client.matmul(a, b, options: MatMulOptions(
///     precision: Precision.fp16,
///     processor: ProcessorType.gpu,
///   ));
///
///   print('Result shape: ${result.result!.shape}');
///   print('Execution time: ${result.executionTimeMs}ms');
///
///   // LLM Inference
///   final response = await client.inference(
///     'llama-3-70b',
///     'Explain quantum computing',
///     options: InferenceOptions(maxTokens: 256),
///   );
///   print(response.result);
///
///   // Streaming inference
///   await for (final token in client.inferenceStream(
///     'llama-3-70b',
///     'Write a haiku about computing',
///   )) {
///     stdout.write(token);
///   }
///
///   // Clean up
///   client.dispose();
/// }
/// ```
///
/// ## Features
///
/// - **Matrix Operations**: matmul, conv2d, attention, elementwise, reduce
/// - **LLM Inference**: Standard and streaming inference
/// - **Tensor Management**: Upload, download, and delete tensors
/// - **Job Management**: Submit, poll, cancel, and list jobs
/// - **Pricing**: Get real-time pricing for all processor types
/// - **Usage Statistics**: Track compute usage and costs
///
/// ## Supported Processors
///
/// | Processor | Best For |
/// |-----------|----------|
/// | CPU | General compute, small batches |
/// | GPU | Large matrix operations, training |
/// | TPU | Tensor operations, inference |
/// | NPU | Neural network inference |
/// | LPU | Large language model inference |
/// | FPGA | Custom operations, low latency |
/// | DSP | Signal processing |
/// | WebGPU | Browser-based compute |
/// | WASM | Portable compute |
library synor_compute;

export 'src/client.dart' show SynorCompute;
export 'src/job.dart' show Job, JobBatch, JobResult, JobStatusUpdate;
export 'src/tensor.dart' show Tensor;
export 'src/types.dart'
    show
        AttentionOptions,
        BalancingStrategy,
        Conv2dOptions,
        DType,
        InferenceOptions,
        JobStatus,
        MatMulOptions,
        Precision,
        PricingInfo,
        Priority,
        ProcessorType,
        SynorConfig,
        SynorException,
        UsageStats;
|
||||
37
sdk/flutter/pubspec.yaml
Normal file
37
sdk/flutter/pubspec.yaml
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
name: synor_compute
description: Flutter/Dart SDK for Synor Compute - distributed heterogeneous computing platform
version: 0.1.0
homepage: https://github.com/mrgulshanyadav/Blockchain.cc
repository: https://github.com/mrgulshanyadav/Blockchain.cc/tree/main/sdk/flutter
issue_tracker: https://github.com/mrgulshanyadav/Blockchain.cc/issues

environment:
  sdk: '>=3.0.0 <4.0.0'
  flutter: '>=3.10.0'

# Supported platforms. For a non-plugin package this must be the top-level
# `platforms:` key; a `platforms:` map nested directly under `flutter:` is
# only recognized inside `flutter: plugin:` for federated plugins and would
# otherwise be ignored by pub.
platforms:
  android:
  ios:
  linux:
  macos:
  web:
  windows:

dependencies:
  flutter:
    sdk: flutter
  http: ^1.1.0
  web_socket_channel: ^2.4.0
  json_annotation: ^4.8.1
  crypto: ^3.0.3
  collection: ^1.18.0

dev_dependencies:
  flutter_test:
    sdk: flutter
  flutter_lints: ^3.0.0
  build_runner: ^2.4.0
  json_serializable: ^6.7.0
  mockito: ^5.4.0
Loading…
Add table
Reference in a new issue