-
Notifications
You must be signed in to change notification settings - Fork 287
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* Philipp clean up docs archive old docs (#819) * Make changes to makefile and index * Archive docker docs * Rework getting started section (#820) * Rework getting started section * Rename docker to worker * Add support for setting dataset type (#822) * Philipp lig 1200 clean up docs first steps (#821) * Rework getting started section * Streamline workflows * Use api workflow client to create dataset * Update FAQ (#824) * Igor lig 1201 clean up docs advanced (#823) * Rearrange docs based on new setup * Update datapool * Update datapool example * Disable pretagging in datapool example * Update pretagging * Use Lightly Worker instead of docker * Update active learning * Implement feedback * Philipp lig 1204 clean up docs examples (#825) * Rework datasets in the wild * Rework examples overview * Rework academic datsets * Igor lig 1203 clean up docs configuration (#826) * Update configuration * Make sure we use Lightly Worker * Implement feedback * Philipp lig 1202 clean up docs integration (#827) * Rework dagster tutorial * Drop the other integration parts * Add default thumbnail suffix (#828) * Igor lig 1231 finish cleanup docs (#829) * Align docs and add missing changes * Rename to register worker * Remove input volume mapping * Update overview section * Make sure we use Lightly Worker * Move bracket to new line Co-authored-by: philippmwirth <philipp.m.wirth@gmail.com> Co-authored-by: Philipp Wirth <65946090+philippmwirth@users.noreply.github.com> Co-authored-by: philippmwirth <philipp.m.wirth@gmail.com>
- Loading branch information
1 parent
806ba5a
commit a64edfc
Showing
80 changed files
with
5,889 additions
and
1,112 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
125 changes: 125 additions & 0 deletions
125
docs/source/docker/advanced/code_examples/python_run_datapool_example.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,125 @@ | ||
# Example script: create a Lightly datapool dataset, attach a cloud
# datasource, and schedule a compute worker run with the full default config.
#
# NOTE: this is documentation example code. Replace the placeholder tokens
# (YOUR_TOKEN, S3-ACCESS-KEY, PROJECT-ID, ...) with real credentials before
# running, and keep only ONE of the three datasource blocks below.
import json

import lightly
from lightly.openapi_generated.swagger_client.models.dataset_type import DatasetType
from lightly.openapi_generated.swagger_client.models.datasource_purpose import DatasourcePurpose


# Create the Lightly client to connect to the API.
client = lightly.api.ApiWorkflowClient(token="YOUR_TOKEN")

# Create a new dataset on the Lightly Platform.
client.create_dataset('pedestrian-videos-datapool',
                      dataset_type=DatasetType.VIDEOS)

# Pick one of the following three blocks depending on where your data is.

# AWS S3
# Input bucket: where the worker reads raw data from.
client.set_s3_config(
    resource_path="s3://bucket/input/",
    region='eu-central-1',
    access_key='S3-ACCESS-KEY',
    secret_access_key='S3-SECRET-ACCESS-KEY',
    purpose=DatasourcePurpose.INPUT
)
# Output bucket: where Lightly writes its artifacts (purpose LIGHTLY).
client.set_s3_config(
    resource_path="s3://bucket/output/",
    region='eu-central-1',
    access_key='S3-ACCESS-KEY',
    secret_access_key='S3-SECRET-ACCESS-KEY',
    purpose=DatasourcePurpose.LIGHTLY
)


# or Google Cloud Storage
# Input bucket. Read the service-account credentials with a context manager
# so the file handle is closed deterministically (a bare open() inside the
# call would leak the handle).
with open('credentials_read.json') as credentials_file:
    client.set_gcs_config(
        resource_path="gs://bucket/input/",
        project_id="PROJECT-ID",
        credentials=json.dumps(json.load(credentials_file)),
        purpose=DatasourcePurpose.INPUT
    )
# Output bucket (write credentials).
with open('credentials_write.json') as credentials_file:
    client.set_gcs_config(
        resource_path="gs://bucket/output/",
        project_id="PROJECT-ID",
        credentials=json.dumps(json.load(credentials_file)),
        purpose=DatasourcePurpose.LIGHTLY
    )


# or Azure Blob Storage
# Input container.
client.set_azure_config(
    container_name='my-container/input/',
    account_name='ACCOUNT-NAME',
    sas_token='SAS-TOKEN',
    purpose=DatasourcePurpose.INPUT
)
# Output container.
client.set_azure_config(
    container_name='my-container/output/',
    account_name='ACCOUNT-NAME',
    sas_token='SAS-TOKEN',
    purpose=DatasourcePurpose.LIGHTLY
)


# Schedule the compute run using our custom config.
# We show here the full default config so you can easily edit the
# values according to your needs.
client.schedule_compute_worker_run(
    worker_config={
        'enable_corruptness_check': True,
        'remove_exact_duplicates': True,
        'enable_training': False,
        'pretagging': False,
        'pretagging_debug': False,
        'method': 'coreset',
        'stopping_condition': {
            # -1 means "no fixed sample budget"; selection stops on
            # min_distance instead.
            'n_samples': -1,
            'min_distance': 0.05  # we set the min_distance to 0.05 in this example
        }
    },
    lightly_config={
        'loader': {
            'batch_size': 128,
            'shuffle': True,
            'num_workers': -1,  # -1 lets the worker choose the number of loader processes
            'drop_last': True
        },
        'model': {
            'name': 'resnet-18',
            'out_dim': 128,
            'num_ftrs': 32,
            'width': 1
        },
        'trainer': {
            'gpus': 1,
            'max_epochs': 1,
            'precision': 16
        },
        'criterion': {
            'temperature': 0.5
        },
        'optimizer': {
            'lr': 1,
            'weight_decay': 0.00001
        },
        'collate': {
            # Augmentation parameters for self-supervised training.
            'input_size': 64,
            'cj_prob': 0.8,
            'cj_bright': 0.7,
            'cj_contrast': 0.7,
            'cj_sat': 0.7,
            'cj_hue': 0.2,
            'min_scale': 0.15,
            'random_gray_scale': 0.2,
            'gaussian_blur': 0.0,
            'kernel_size': 0.1,
            'vf_prob': 0,
            'hf_prob': 0.5,
            'rr_prob': 0
        }
    }
)
|
Oops, something went wrong.