Production deployment #19

Merged 1 commit on Dec 19, 2022
52 changes: 17 additions & 35 deletions ckanext/fork/actions.py
@@ -123,41 +123,24 @@ def _get_dataset_from_resource_uuid(context, uuid):


@toolkit.chained_action
def package_create_update(next_action, context, data_dict):
def package_create(next_action, context, data_dict):
for resource in data_dict.get("resources", []):
try:
original_resource_data = toolkit.get_action('resource_show')(
context,
{"id": resource["id"]}
)
except logic.NotFound as e:
log.info(f"No existing resource found with error: {e}")
original_resource_data = {}
except KeyError as e:
log.info(f"No resource id provided, probably a new resource. Original error: {e}")
original_resource_data = {}
except Exception as e:
log.exception([
"Trying to update a resource but I can't find the original resource ",
"to check the metadata to catch fork changes."
])
raise e

if resource.get("fork_resource") or original_resource_data.get("fork_resource"):
file_metadata_changed = False

for key in ['lfs_prefix', 'size', 'sha256', 'url_type']:
new_value = resource.get(key, "")
if new_value:
original_value = original_resource_data.get(key, "")
if original_value != new_value:
file_metadata_changed = True

if resource.get("fork_resource") and not file_metadata_changed:
resource = util.blob_storage_fork_resource(context, resource)
else:
resource['fork_resource'] = ''
resource['fork_activity'] = ''
if resource.get("fork_resource"):
resource = util.blob_storage_fork_resource(context, resource)

return next_action(context, data_dict)


@toolkit.chained_action
def package_update(next_action, context, data_dict):
for resource in data_dict.get("resources", []):
current = util.get_current_resource(context, resource)
resource_metadata_changed = util.check_metadata_for_file_change(current, resource)
if resource.get("fork_resource") and not resource_metadata_changed:
resource = util.blob_storage_fork_resource(context, resource)
else:
resource['fork_resource'] = ''
resource['fork_activity'] = ''

return next_action(context, data_dict)

@@ -174,4 +157,3 @@ def package_show(next_action, context, data_dict):
resource['fork_synced'] = util.is_synced_fork(context, resource)

return dataset
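
With this split, `package_create` forks any resource that carries a `fork_resource` id, while `package_update` only re-forks when the file metadata is unchanged and otherwise clears the fork fields. Below is a minimal sketch (not part of the diff) of how the new `package_update` path could be exercised through CKAN's action functions; the dataset and resource ids are placeholders and the context user is assumed to have update rights.

```python
import ckan.plugins.toolkit as toolkit

context = {"user": "some-sysadmin"}  # placeholder user with update rights

# Point one resource at a parent resource and update the package. Because no
# file metadata key (lfs_prefix, size, sha256, url_type) is overridden here,
# check_metadata_for_file_change() sees no change, so blob_storage_fork_resource()
# re-points the resource at the parent's file data.
dataset = toolkit.get_action("package_show")(context, {"id": "my-dataset"})
dataset["resources"][0]["fork_resource"] = "parent-resource-id"
updated = toolkit.get_action("package_update")(context, dataset)
```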

11 changes: 3 additions & 8 deletions ckanext/fork/helpers.py
@@ -1,20 +1,17 @@
from ckanext.fork.util import get_forked_data
from ckan.plugins import toolkit


def get_parent_resource_details(resource_id):
try:
resource = toolkit.get_action('resource_show')(data_dict={'id': resource_id})
except toolkit.NotAuthorized:
return {'success': False,
'msg': "Unable to access parent resource information; current user may not be authorized to access this."}

try:
package = toolkit.get_action('package_show')(data_dict={'id': resource['package_id']})
except toolkit.NotAuthorized:
return {'success': False,
'msg': "Unable to access parent resource package information; current user may not be authorized to access this."}
'msg': "Unable to access parent resource information; current user may not be authorized to access this."}

organization = package['organization']

return {
'success': True,
'resource': {
@@ -33,7 +30,6 @@ def get_parent_resource_details(resource_id):
}



def fork_metadata(resource):
metadata = {}

@@ -59,4 +55,3 @@ def fork_metadata(resource):
pass

return metadata
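
As a quick, hedged illustration of the helper's contract (the resource id below is a placeholder; in templates it is exposed as `h.fork_get_parent_resource_details`, see plugin.py below):

```python
from ckanext.fork.helpers import get_parent_resource_details

details = get_parent_resource_details("parent-resource-id")  # placeholder id
if details["success"]:
    parent = details["resource"]   # details of the parent (forked-from) resource
else:
    print(details["msg"])          # e.g. the NotAuthorized message above
```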

8 changes: 3 additions & 5 deletions ckanext/fork/plugin.py
@@ -4,6 +4,7 @@
import ckanext.fork.validators as fork_validators
from ckanext.fork.helpers import get_parent_resource_details


class ForkPlugin(plugins.SingletonPlugin, toolkit.DefaultDatasetForm):

plugins.implements(plugins.IConfigurer)
@@ -84,8 +85,8 @@ def get_actions(self):
return {
'resource_autocomplete': fork_actions.resource_autocomplete,
'package_show': fork_actions.package_show,
'package_create': fork_actions.package_create_update,
'package_update': fork_actions.package_create_update,
'package_create': fork_actions.package_create,
'package_update': fork_actions.package_update,
'dataset_fork': fork_actions.dataset_fork,
'resource_fork': fork_actions.resource_fork,
}
@@ -102,7 +103,4 @@ def get_validators(self):

# ITemplateHelpers
def get_helpers(self):
# Template helper function names should begin with the name of the
# extension they belong to, to avoid clashing with functions from
# other extensions.
return {'fork_get_parent_resource_details': get_parent_resource_details}
117 changes: 109 additions & 8 deletions ckanext/fork/tests/test_actions.py
@@ -1,3 +1,4 @@
import io
import pytest
import ckan.plugins.toolkit as toolkit
from ckan.tests import factories
@@ -91,7 +92,7 @@ def test_resource_autocomplete_output_format(self, datasets):


@pytest.mark.usefixtures('clean_db')
class TestPackageShow():
class TestResourceShow():

def test_synced_fork_display(self, forked_data):
dataset = factories.Dataset()
@@ -115,9 +116,16 @@ def test_unsynced_fork(self, forked_data):


@pytest.mark.usefixtures('clean_db')
class TestPackageCreate():
class TestResourceCreate():

def test_resource_create_with_no_activity_id(self, forked_data):
def test_not_fork_resource_create(self):
dataset = factories.Dataset()
resource = factories.Resource(package_id=dataset['id'])
assert not resource.get('fork_resource', False)
assert not resource.get('fork_activity', False)
assert not resource.get('fork_synced', False)

def test_fork_resource_create_with_no_activity_id(self, forked_data):
dataset = factories.Dataset()
resource = call_action(
"resource_create",
@@ -129,7 +137,7 @@ def test_resource_create_with_no_activity_id(self, forked_data):
for key in ['sha256', 'size', 'lfs_prefix', 'url_type']:
assert resource[key] == forked_data['resource'][key]

def test_resource_create_with_activity_id(self, forked_data):
def test_fork_resource_create_with_activity_id(self, forked_data):
call_action('resource_patch', id=forked_data['resource']['id'], sha256='newsha')
dataset = factories.Dataset()
resource = call_action(
@@ -144,9 +152,9 @@ def test_resource_create_with_activity_id(self, forked_data):


@pytest.mark.usefixtures('clean_db')
class TestPackageUpdate():
class TestResourceUpdate():

def test_resource_update_with_no_activity_id(self, forked_data):
def test_fork_resource_update_with_no_activity_id(self, forked_data):
dataset = factories.Dataset()
resource = factories.Resource(
package_id=dataset['id'],
@@ -171,7 +179,7 @@ def test_resource_update_with_no_activity_id(self, forked_data):
for key in ['sha256', 'size', 'lfs_prefix', 'url_type']:
assert resource[key] == forked_data['resource'][key]

def test_resource_update_with_activity_id(self, forked_data):
def test_fork_resource_update_with_activity_id(self, forked_data):
call_action('resource_patch', id=forked_data['resource']['id'], sha256='newsha')
dataset = factories.Dataset()
resource = factories.Resource(
@@ -195,6 +203,99 @@ def test_resource_update_with_activity_id(self, forked_data):
for key in ['sha256', 'size', 'lfs_prefix', 'url_type']:
assert resource[key] == forked_data['resource'][key]

def test_non_fork_resource_update_with_new_fork_resource(self, forked_data):
dataset = factories.Dataset()
resource = factories.Resource(
package_id=dataset['id'],
)
resource = call_action(
"resource_update",
id=resource['id'],
fork_resource=forked_data['resource']['id'],
)
assert resource['fork_activity'] == forked_data['activity_id']

for key in ['sha256', 'size', 'lfs_prefix', 'url_type']:
assert resource[key] == forked_data['resource'][key]

def test_fork_resource_update_with_new_non_fork_resource_details(self, forked_data):
dataset = factories.Dataset()
resource = factories.Resource(
package_id=dataset['id'],
fork_resource=forked_data['resource']['id']
)
resource = call_action(
"resource_update",
id=resource['id'],
url='http://link.to.some.data'
)
assert resource['fork_resource'] == ''
assert resource['fork_activity'] == ''

def test_fork_resource_update_with_new_metadata(self, forked_data):
dataset = factories.Dataset()
resource = factories.Resource(
package_id=dataset['id'],
fork_resource=forked_data['resource']['id']
)
resource['description'] = 'New description'
resource = call_action(
"resource_update",
**resource
)
assert resource['fork_resource'] == forked_data['resource']['id']
assert resource['fork_activity'] == forked_data['activity_id']
assert resource['description'] == 'New description'

def test_non_fork_resource_update_with_new_fork_details(self, forked_data):
dataset = factories.Dataset()
resource = factories.Resource(
package_id=dataset['id'],
)
resource = call_action(
"resource_update",
id=resource['id'],
fork_resource=forked_data['resource']['id']
)
assert resource['fork_activity'] == forked_data['activity_id']

for key in ['sha256', 'size', 'lfs_prefix', 'url_type']:
assert resource[key] == forked_data['resource'][key]

def test_fork_resource_update_with_new_non_fork_resource_details_via_api_call(self, app, forked_data):
user = factories.Sysadmin()
dataset = factories.Dataset()
resource = factories.Resource(
package_id=dataset['id'],
fork_resource=forked_data['resource']['id']
)

headers = {
'Authorization': user['apikey'],
}
files = {
'id': resource['id'],
'upload': (io.BytesIO(b'some text data'), 'test.txt'),
}
response = app.post(
url=toolkit.url_for(
controller='api',
action='action',
logic_function='resource_update',
ver='/3'
),
headers=headers,
data=files
)
assert response.status_code == 200

resource = call_action(
"resource_show",
id=resource['id'],
)
assert resource['fork_resource'] == ''
assert resource['fork_activity'] == ''


@pytest.fixture
def dataset():
@@ -204,7 +305,7 @@ def dataset():
id="test-id",
owner_org=org['id'],
)
dataset['resources'] = resources=[factories.Resource(
dataset['resources'] = [factories.Resource(
package_id='test-id',
sha256='testsha256',
size=999,
30 changes: 30 additions & 0 deletions ckanext/fork/util.py
@@ -1,5 +1,9 @@
import logging
import ckan.logic as logic
from ckan.plugins import toolkit

log = logging.getLogger(__name__)


def get_forked_data(context, resource_id, activity_id=None):

@@ -49,3 +53,29 @@ def blob_storage_fork_resource(context, resource):

resource['fork_activity'] = forked_data['activity_id']
return resource


def get_current_resource(context, resource):
current_resource = {}
if resource.get("id"):
try:
current_resource = toolkit.get_action('resource_show')(
context,
{"id": resource["id"]}
)
except logic.NotFound:
log.info(f"Resource {resource['id']} does not exist "
"- must be creating a new resource with specific id.")
return current_resource


def check_metadata_for_file_change(current, resource):
file_metadata_changed = False
if resource.get("fork_resource") or current.get("fork_resource"):
for key in ['lfs_prefix', 'size', 'sha256', 'url_type']:
new_value = resource.get(key, "")
if new_value:
original_value = current.get(key, "")
if original_value != new_value:
file_metadata_changed = True
return file_metadata_changed
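
A small illustration of how `check_metadata_for_file_change` behaves on plain dicts; the ids and hashes below are made up:

```python
from ckanext.fork.util import check_metadata_for_file_change

current = {"fork_resource": "parent-id", "sha256": "abc123", "size": 999}

# Identical file metadata: no change detected, so package_update keeps the fork.
assert not check_metadata_for_file_change(current, dict(current))

# A different sha256 means the file itself was replaced: change detected, so
# package_update clears fork_resource/fork_activity instead of re-forking.
assert check_metadata_for_file_change(current, {**current, "sha256": "newsha"})
```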