diff --git a/composer.json b/composer.json index 4fb057b..994e3f7 100644 --- a/composer.json +++ b/composer.json @@ -15,7 +15,6 @@ } ], "require": { - "biigle/laravel-aruna-driver": "^1.0", "league/flysystem-aws-s3-v3": "^3.12", "league/flysystem-read-only": "^3.3" }, diff --git a/src/Database/migrations/2024_02_26_110100_remove_aruna_type.php b/src/Database/migrations/2024_02_26_110100_remove_aruna_type.php new file mode 100644 index 0000000..17b8044 --- /dev/null +++ b/src/Database/migrations/2024_02_26_110100_remove_aruna_type.php @@ -0,0 +1,29 @@ +delete(); + } + + /** + * Reverse the migrations. + * + * @return void + */ + public function down() + { + // + } +}; diff --git a/src/Http/Requests/StoreUserDisk.php b/src/Http/Requests/StoreUserDisk.php index 84a44eb..63d18b6 100644 --- a/src/Http/Requests/StoreUserDisk.php +++ b/src/Http/Requests/StoreUserDisk.php @@ -26,7 +26,7 @@ public function rules() { return array_merge([ 'name' => 'required', - 'type' => 'required|in:s3,aruna', + 'type' => 'required|in:s3', ], $this->getTypeValidationRules()); } diff --git a/src/config/user_disks.php b/src/config/user_disks.php index 75d171c..26e9333 100644 --- a/src/config/user_disks.php +++ b/src/config/user_disks.php @@ -7,7 +7,6 @@ */ 'types' => [ 's3' => 'S3', - 'aruna' => 'Aruna Object Storage', ], /* @@ -34,18 +33,6 @@ 'secret' => '', 'endpoint' => '', ], - 'aruna' => [ - // These options are fixed. - 'driver' => 'aruna', - 'apiUri' => 'https://api.aruna-storage.org', - // These should be configured by the user. - 'bucket' => '', - 'key' => '', - 'secret' => '', - 'endpoint' => '', - 'collectionId' => '', - 'token' => '', - ], ], /* @@ -60,14 +47,6 @@ 'key' => 'required', 'secret' => 'required', ], - 'aruna' => [ - 'bucket' => 'required', - 'endpoint' => 'required|url', - 'key' => 'required', - 'secret' => 'required', - 'collectionId' => 'required', - 'token' => 'required', - ], ], /* @@ -82,14 +61,6 @@ 'key' => 'filled', 'secret' => 'filled', ], - 'aruna' => [ - 'bucket' => 'filled', - 'endpoint' => 'filled|url', - 'key' => 'filled', - 'secret' => 'filled', - 'collectionId' => 'filled', - 'token' => 'filled', - ], ], /* diff --git a/src/resources/views/manual/tutorials/about.blade.php b/src/resources/views/manual/tutorials/about.blade.php index 4cd6e21..0ab5079 100644 --- a/src/resources/views/manual/tutorials/about.blade.php +++ b/src/resources/views/manual/tutorials/about.blade.php @@ -40,9 +40,8 @@ - @foreach(config('user_disks.types') as $type => $name) - @include("user-disks::manual.types.{$type}") - @endforeach + @include("user-disks::manual.types.s3") + @include("user-disks::manual.types.aos") @endsection diff --git a/src/resources/views/manual/types/aos.blade.php b/src/resources/views/manual/types/aos.blade.php new file mode 100644 index 0000000..59d2130 --- /dev/null +++ b/src/resources/views/manual/types/aos.blade.php @@ -0,0 +1,113 @@ +

Aruna Object Storage

+ +

+ The Aruna Object Storage (AOS) is a storage service for the German initiative for a national research data infrastructure (NFDI). Before you can start using AOS, you have to sign up for a user account on the website. +

+ +

+ While the connection to AOS can be established via the same S3 protocol that is described above, the setup and configuration work a little differently. Here is a description of the S3 options for AOS: +

+ +
+
Bucket name
+
+

+ The name of your AOS project. +

+
+ +
Endpoint
+
+

    + The endpoint is the URL https://<bucket>.data.gi.aruna-storage.org where <bucket> is replaced with the bucket name above (a worked example follows below this list). +

+
+ +
Access key
+
+

+ The "AccessKey" that is provided with new data proxy credentials. +

+
+ +
Secret key
+
+

+ The "SecretKey" that is provided with new data proxy credentials. +

+
+
+ +
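+
+ For example, assuming the AOS project is called "myproject" (a placeholder name that is also used in the steps below), the two fields above would be filled in like this:
+
+    Bucket name: myproject
+    Endpoint: https://myproject.data.gi.aruna-storage.org
+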

+ Detailed setup instructions: +

+ +
    +
  1. +

    + Log in to the AOS dashboard, select "Explore" and then "Resources" in the menu at the top. +

    +
  2. +
  3. +

    + Click on the "Create new" button and create a new project (we call it "myproject" here). The project name is the value of the Bucket name field that is required to create the new storage disk. With the project name you can also fill in the Endpoint field. +

    +
  4. +
  5. +

    + Now select "Access" and then "Data proxies" in the AOS menu at the top. Choose a data proxy where you would like to store your data. There, click on the "Create Credential" button. The AccessKey is the value of the Access key field and the SecretKey is the value of the Secret key field that is required to create the new storage disk. +

    +
  6. +
+ +

+ Now you have the values for all fields that are required to create the new storage disk. However, one more step is required before you can annotate your data without restrictions in BIIGLE. You have to configure "Cross-origin resource sharing (CORS)". This is done as follows: +

+ +
    +
  1. +

    + Install s3cmd and run s3cmd --configure. Enter the access key and secret key from above. Don't change the default region. Enter the S3 endpoint data.gi.aruna-storage.org and the bucket template %(bucket)s.data.gi.aruna-storage.org. Leave the remaining options unchanged. Don't run the test with the supplied credentials and save the settings. A sketch of the resulting configuration is shown after this list. +

    +
  2. +
  3. +

    + Create a file called cors.xml with the following content: +

    +
    +<CORSConfiguration>
    +  <CORSRule>
    +    <AllowedOrigin>{{url('/')}}</AllowedOrigin>
    +    <AllowedMethod>GET</AllowedMethod>
    +    <AllowedHeader>*</AllowedHeader>
    +    <MaxAgeSeconds>30</MaxAgeSeconds>
    +  </CORSRule>
    +</CORSConfiguration>
    +
    +

    + Then run the following command: s3cmd setcors cors.xml s3://myproject (you should replace "myproject" with the actual name of your project). That's it. Now CORS is configured for your project. +

    +
  4. +
+ +
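+
+ If you want to double-check the s3cmd setup from the first step above, the relevant entries in ~/.s3cfg should look roughly like the following sketch. The key values are placeholders and the exact layout of the file may differ between s3cmd versions:
+
+    # relevant entries in ~/.s3cfg after running s3cmd --configure
+    access_key = YOUR_ACCESS_KEY
+    secret_key = YOUR_SECRET_KEY
+    host_base = data.gi.aruna-storage.org
+    host_bucket = %(bucket)s.data.gi.aruna-storage.org
+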

+ Here is a brief example of how you can upload files to your project. This is also done with s3cmd: +

+ +
    +
  1. +

    + Make sure s3cmd is configured as described above. +

    +
  2. +
  3. +

    + Now navigate to the parent of the directory that you want to upload. Upload the whole directory with the following command (replace "mydir" with the name of the directory to upload and "myproject" with the name of your project): +

    +
    s3cmd put -r mydir s3://myproject/
    +

    + The directory will be created as a new dataset as part of your AOS project. In BIIGLE, you will see it as a directory in the file browser. A quick way to check the upload is shown after this list. +

    +
  4. +
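+
+ To check that the upload arrived, you can list the uploaded files with s3cmd (again, "mydir" and "myproject" are placeholders; this assumes that listing is permitted with your credentials):
+
+    s3cmd ls s3://myproject/mydir/
+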
+
diff --git a/src/resources/views/manual/types/aruna.blade.php b/src/resources/views/manual/types/aruna.blade.php
deleted file mode 100644
index 63c2484..0000000
--- a/src/resources/views/manual/types/aruna.blade.php
+++ /dev/null
@@ -1,124 +0,0 @@
-

Aruna Object Storage

- -

- The Aruna Object Storage (AOS) is a storage service for the German initiative for a national research data infrastructure (NFDI). Before you can start using AOS, you have to sign up for a user account on the website. -

- -

- An AOS storage disk has the following options: -

- -
-
Collection ID
-
-

- The ID of the AOS collection as returned by the API. -

-
- -
Bucket name
-
-

- The name of the collection bucket which consists of the collection version (or "latest"), the collection name and the project name joined with dots. -

-
- -
Endpoint
-
-

- The endpoint is the URL https://<bucket>.data.gi.aruna-storage.org where <bucket> is replaced with the bucket name above. -

-
- -
Access key
-
-

- The "S3 access key" that is provided with a new access token. -

-
- -
Secret key
-
-

- The "S3 secret key" that is provided with a new access token. -

-
- -
API secret
-
-

- The "secret" that is provided with a new access token. -

-
-
- -

- Detailed setup instructions: -

- -
    -
  1. -

    - Log in to the AOS dashboard and note down the ID (ULID) of your project. -

    -
  2. -
  3. -

    - Create a new token with the "project" token type, choose a name, enter the project ULID, choose "MODIFY" permissions and click "Create". The note down the "secret". -

    -
  4. -
  5. -

    - Create a new collection in the project with the following cURL request to the API (choose your own collection name and description and replace PROJECT_ULID with the project ID and TOKEN with the token secret): -

    -
    -curl -d '
    -  {
    -    "name": "mycollection",
    -    "description": "This is my collection.",
    -    "projectId": "PROJECT_ULID",
    -    "dataclass": "DATA_CLASS_PRIVATE"
    -  }' \
    -  -H 'Authorization: Bearer TOKEN' \
    -  -H 'Content-Type: application/json' \
    -  -X POST https://api.aruna-storage.org/v1/collection
    -
    -

    - The returned collectionID is the value of the Collection ID field that is required to create a new AOS storage disk. -

    -

    - The value of the Bucket name field can be determined now, too. It consists of the collection version (or "latest"), the collection name and the project name joined with dots (example: latest.mycollection.myproject). -

    -

    - Finally, the value of the Endpoint field can be determined. It is the URL https://<bucket>.data.gi.aruna-storage.org where <bucket> is replaced with the bucket name above. -

    -
  6. -
  7. -

    - Now go back to the dashboard and create a new token. This time it should have the token type "collection". Enter the collection ULID and choose "READ" permissions. This token provides the "S3 access key" as the value of the Access key field, the "S3 secret key" as the value of the Secret key field and the "Secret" as the value of the API secret field that are required to create a new AOS storage disk. -

    -
  8. -
- -

- Now you have the values for all fields that are required to create a new AOS storage disk. Here is a brief example for how you can upload files to your collection using the S3 protocol in a command line: -

- -
    -
  1. -

    - Go to the AOS dashboard and create a new token. It should have the token type "collection". Enter the collection ULID and choose "MODIFY" permissions. Note down the "S3 access key" and "S3 secret key". -

    -
  2. -
  3. -

    - Install `s3cmd` and run `s3cmd --configure`. Enter the access key and secret key. Don't change the default region. Enter the S3 endpoint data.gi.aruna-storage.org and the bucket template %(bucket)s.data.gi.aruna-storage.org. Leave the remaining options unchanged. Don't run the test with the supplied credentials and save the settings. -

    -
  4. -
  5. -

    - Now navigate to the directory of files that you want to upload. Currently, no subdirectories can be uploaded via S3. Upload all files of the current directory with the command (replace mycollection and myproject with the names of your AOS collection and project, respectively): -

    -
    s3cmd put * s3://latest.mycollection.myproject/
    -
  6. -
diff --git a/src/resources/views/store/aruna.blade.php b/src/resources/views/store/aruna.blade.php
deleted file mode 100644
index 61183aa..0000000
--- a/src/resources/views/store/aruna.blade.php
+++ /dev/null
@@ -1,70 +0,0 @@
-
-
- - - @error('collectionId') -

{{$message}}

- @enderror -
-
-
-
- - - @error('bucket') -

{{$message}}

- @enderror -

- The bucket name consists of the collection version (or "latest"), the collection name and the project name joined with dots. -

-
-
-
-
- - - @error('endpoint') -

{{$message}}

- @enderror -

- The endpoint is the URL https://<bucket>.data.gi.aruna-storage.org where <bucket> is replaced with the bucket name above. -

-
-
-
-
- - - @error('key') -

{{$message}}

- @enderror -
-
-
-
- - - @error('secret') -

{{$message}}

- @enderror -
-
-
-
- - - @error('token') -

{{$message}}

- @enderror -
-
-
-
- -

- The access credentials are stored in the BIIGLE database and minimum permissions reduce risk in case of exposure. -

-
-
diff --git a/src/resources/views/update/aruna.blade.php b/src/resources/views/update/aruna.blade.php
deleted file mode 100644
index 777b940..0000000
--- a/src/resources/views/update/aruna.blade.php
+++ /dev/null
@@ -1,67 +0,0 @@
-
-
- - - @error('collectionId') -

{{$message}}

- @enderror -
-
-
-
- - - @error('bucket') -

{{$message}}

- @enderror -

- The bucket name consists of the collection version (or "latest"), the collection name and the project name joined with dots. -

-
-
-
-
- - - @error('endpoint') -

{{$message}}

- @enderror -

- The endpoint is the URL https://<bucket>.data.gi.aruna-storage.org where <bucket> is replaced with the bucket name above. -

-
-
-
-
-
- Your access credentials are stored in the BIIGLE database. Please configure the credentials to have only the minimum required permissions. -
-
-
-
-
- - - @error('key') -

{{$message}}

- @enderror -
-
-
-
- - - @error('secret') -

{{$message}}

- @enderror -
-
-
-
- - - @error('token') -

{{$message}}

- @enderror -
-
diff --git a/tests/Http/Controllers/Api/UserDiskControllerTest.php b/tests/Http/Controllers/Api/UserDiskControllerTest.php index b961956..5ad2d51 100644 --- a/tests/Http/Controllers/Api/UserDiskControllerTest.php +++ b/tests/Http/Controllers/Api/UserDiskControllerTest.php @@ -162,43 +162,6 @@ public function testStoreS3PathStyle() $this->assertEquals($expect, $disk->options); } - public function testStoreAruna() - { - $this->beUser(); - $this->postJson("/api/v1/user-disks", [ - 'name' => 'my disk', - 'type' => 'aruna', - ]) - ->assertStatus(422); - - $this->postJson("/api/v1/user-disks", [ - 'name' => 'my disk', - 'type' => 'aruna', - 'key' => 'abc', - 'secret' => 'abc', - 'bucket' => 'bucket', - 'endpoint' => 'http://bucket.example.com', - 'collectionId' => 'MYARUNACOLLECTIONULID', - 'token' => 'MYSECRETTOKEN', - ]) - ->assertStatus(201); - - $disk = UserDisk::where('user_id', $this->user()->id)->first(); - $this->assertNotNull($disk); - $this->assertEquals('my disk', $disk->name); - $this->assertEquals('aruna', $disk->type); - $this->assertNotNull($disk->expires_at); - $expect = [ - 'key' => 'abc', - 'secret' => 'abc', - 'bucket' => 'bucket', - 'endpoint' => 'http://bucket.example.com', - 'collectionId' => 'MYARUNACOLLECTIONULID', - 'token' => 'MYSECRETTOKEN', - ]; - $this->assertEquals($expect, $disk->options); - } - public function testUpdate() { $disk = UserDisk::factory()->create(); @@ -298,48 +261,6 @@ public function testUpdateS3PathStyle() $this->assertEquals($expect, $disk->options); } - public function testUpdateAruna() - { - $disk = UserDisk::factory()->create([ - 'type' => 'aruna', - 'name' => 'abc', - 'options' => [ - 'key' => 'def', - 'secret' => 'ghi', - 'bucket' => 'jkl', - 'endpoint' => 'https://jkl.example.com', - 'collectionId' => 'pqr', - 'token' => 'stu', - ], - ]); - - $this->be($disk->user); - $this->putJson("/api/v1/user-disks/{$disk->id}", [ - 'type' => 'unknown', - 'name' => 'cba', - 'key' => 'fed', - 'secret' => 'ihg', - 'bucket' => 'onm', - 'endpoint' => 'https://lkj.example.com', - 'collectionId' => 'rqp', - 'token' => 'uts', - ]) - ->assertStatus(200); - - $disk->refresh(); - $expect = [ - 'key' => 'fed', - 'secret' => 'ihg', - 'bucket' => 'onm', - 'endpoint' => 'https://lkj.example.com', - 'collectionId' => 'rqp', - 'token' => 'uts', - ]; - $this->assertEquals('aruna', $disk->type); - $this->assertEquals('cba', $disk->name); - $this->assertEquals($expect, $disk->options); - } - public function testUpdateEmpty() { $options = [