2 changes: 1 addition & 1 deletion .github/workflows/check-consistent-dependencies.yml
@@ -15,7 +15,7 @@ defaults:
jobs:
check-requirements:
name: Compile requirements
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04

steps:
# Only run remaining steps if there are changes to requirements/**
2 changes: 1 addition & 1 deletion .github/workflows/ci-static-analysis.yml
@@ -10,7 +10,7 @@ jobs:
matrix:
python-version:
- "3.11"
os: ["ubuntu-22.04"]
os: ["ubuntu-24.04"]

steps:
- uses: actions/checkout@v4
5 changes: 4 additions & 1 deletion .github/workflows/js-tests.yml
@@ -26,6 +26,7 @@ jobs:
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'

- name: Setup npm
run: npm i -g npm@10.5.x
@@ -63,7 +64,9 @@ jobs:
run: |
make base-requirements

- uses: c-hive/gha-npm-cache@v1
- name: Install npm
run: npm ci

- name: Run JS Tests
env:
TEST_SUITE: js-unit
4 changes: 2 additions & 2 deletions .github/workflows/migrations-check.yml
@@ -13,7 +13,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-22.04]
os: [ubuntu-24.04]
python-version:
- "3.11"
# 'pinned' is used to install the latest patch version of Django
@@ -126,7 +126,7 @@ jobs:
if: always()
needs:
- check_migrations
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: Decide whether the needed jobs succeeded or failed
# uses: re-actors/alls-green@v1.2.1
4 changes: 2 additions & 2 deletions .github/workflows/pylint-checks.yml
@@ -8,7 +8,7 @@ on:

jobs:
run-pylint:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
strategy:
fail-fast: false
matrix:
@@ -75,7 +75,7 @@ jobs:
if: always()
needs:
- run-pylint
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: Decide whether the needed jobs succeeded or failed
# uses: re-actors/alls-green@v1.2.1
2 changes: 1 addition & 1 deletion .github/workflows/quality-checks.yml
@@ -13,7 +13,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-22.04]
os: [ubuntu-24.04]
python-version:
- "3.11"
node-version: [20]
2 changes: 1 addition & 1 deletion .github/workflows/static-assets-check.yml
@@ -12,7 +12,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-22.04]
os: [ubuntu-24.04]
python-version:
- "3.11"
node-version: [18, 20]
62 changes: 29 additions & 33 deletions .github/workflows/unit-tests.yml
@@ -15,7 +15,7 @@ concurrency:
jobs:
run-tests:
name: ${{ matrix.shard_name }}(py=${{ matrix.python-version }},dj=${{ matrix.django-version }},mongo=${{ matrix.mongo-version }})
runs-on: ubuntu-22.04
runs-on: ${{ matrix.os-version }}
strategy:
matrix:
python-version:
@@ -43,22 +43,27 @@ jobs:
- "xmodule-with-cms"
mongo-version:
- "7.0"
os-version:
- ubuntu-24.04

# We only need to test older versions of Mongo with modules that directly interface with Mongo (that is: xmodule.modulestore)
# This code is left here as an example for future reference in case we need to reduce the number of shards we're
# testing but still have good confidence with older versions of mongo. We use Mongo 4.4 in the example.
# It's useful to run some subset of the tests on the older version of Ubuntu
# so that we don't spend too many resources on this but can find major issues quickly
# while we're in a situation where we support two versions. This section may be commented
# out when not in use to easily add/drop future support for any given major dependency.
#
# exclude:
# - mongo-version: "4.4"
# include:
# - shard_name: "xmodule-with-cms"
# python-version: "3.11"
# django-version: "pinned"
# mongo-version: "4.4"
# - shard_name: "xmodule-with-lms"
# python-version: "3.11"
# django-version: "pinned"
# mongo-version: "4.4"
# We're testing the older version of Ubuntu and running the xmodule tests since those rely on many
# dependent complex libraries and will hopefully catch most issues quickly.
include:
- shard_name: "xmodule-with-cms"
python-version: "3.11"
django-version: "pinned"
mongo-version: "7.0"
os-version: "ubuntu-22.04"
- shard_name: "xmodule-with-lms"
python-version: "3.11"
django-version: "pinned"
mongo-version: "7.0"
os-version: "ubuntu-22.04"

steps:
- name: checkout repo
@@ -90,19 +95,10 @@ jobs:
activate = 1
EOF

- name: install mongo version
run: |
if [[ "${{ matrix.mongo-version }}" != "4.4" ]]; then
wget -qO - https://www.mongodb.org/static/pgp/server-${{ matrix.mongo-version }}.asc | sudo apt-key add -
echo "deb https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/${{ matrix.mongo-version }} multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-${{ matrix.mongo-version }}.list
sudo apt-get update && sudo apt-get install -y mongodb-org="${{ matrix.mongo-version }}.*"
fi

- name: start mongod server for tests
run: |
sudo mkdir -p /data/db
sudo chmod -R a+rw /data/db
mongod &
- name: Start MongoDB
uses: supercharge/mongodb-github-action@1.11.0
with:
mongodb-version: ${{ matrix.mongo-version }}

- name: Setup Python
uses: actions/setup-python@v5
@@ -164,7 +160,7 @@ jobs:
overwrite: true

collect-and-verify:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
- name: Setup Python
@@ -229,7 +225,7 @@ jobs:
# https://github.com/orgs/community/discussions/33579
success:
name: Unit tests successful
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
if: always()
needs: [run-tests]
steps:
@@ -240,7 +236,7 @@
jobs: ${{ toJSON(needs) }}

compile-warnings-report:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
needs: [run-tests]
steps:
- uses: actions/checkout@v4
@@ -268,7 +264,7 @@ jobs:
overwrite: true

merge-artifacts:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
needs: [compile-warnings-report]
steps:
- name: Merge Pytest Warnings JSON Artifacts
@@ -288,7 +284,7 @@
# Combine and upload coverage reports.
coverage:
if: (github.repository == 'edx/edx-platform-private') || (github.repository == 'openedx/edx-platform' && (startsWith(github.base_ref, 'open-release') == false))
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
needs: [run-tests]
strategy:
matrix:
95 changes: 71 additions & 24 deletions cms/djangoapps/contentstore/helpers.py
@@ -28,6 +28,7 @@
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
import openedx.core.djangoapps.content_staging.api as content_staging_api
import openedx.core.djangoapps.content_tagging.api as content_tagging_api
from openedx.core.djangoapps.content_staging.data import LIBRARY_SYNC_PURPOSE

from .utils import reverse_course_url, reverse_library_url, reverse_usage_url

@@ -261,6 +262,37 @@ class StaticFileNotices:
error_files: list[str] = Factory(list)


def _insert_static_files_into_downstream_xblock(
downstream_xblock: XBlock, staged_content_id: int, request
) -> StaticFileNotices:
"""
Gets static files from staged content, and inserts them into the downstream XBlock.
"""
static_files = content_staging_api.get_staged_content_static_files(staged_content_id)
notices, substitutions = _import_files_into_course(
course_key=downstream_xblock.context_key,
staged_content_id=staged_content_id,
static_files=static_files,
usage_key=downstream_xblock.scope_ids.usage_id,
)

# Rewrite the OLX's static asset references to point to the new
# locations for those assets. See _import_files_into_course for more
# info on why this is necessary.
store = modulestore()
if hasattr(downstream_xblock, "data") and substitutions:
data_with_substitutions = downstream_xblock.data
for old_static_ref, new_static_ref in substitutions.items():
data_with_substitutions = data_with_substitutions.replace(
old_static_ref,
new_static_ref,
)
downstream_xblock.data = data_with_substitutions
if store is not None:
store.update_item(downstream_xblock, request.user.id)
return notices


def import_staged_content_from_user_clipboard(parent_key: UsageKey, request) -> tuple[XBlock | None, StaticFileNotices]:
"""
Import a block (along with its children and any required static assets) from
@@ -298,31 +330,43 @@ def import_staged_content_from_user_clipboard(parent_key: UsageKey, request) ->
tags=user_clipboard.content.tags,
)

# Now handle static files that need to go into Files & Uploads.
static_files = content_staging_api.get_staged_content_static_files(user_clipboard.content.id)
notices, substitutions = _import_files_into_course(
course_key=parent_key.context_key,
staged_content_id=user_clipboard.content.id,
static_files=static_files,
usage_key=new_xblock.scope_ids.usage_id,
)

# Rewrite the OLX's static asset references to point to the new
# locations for those assets. See _import_files_into_course for more
# info on why this is necessary.
if hasattr(new_xblock, 'data') and substitutions:
data_with_substitutions = new_xblock.data
for old_static_ref, new_static_ref in substitutions.items():
data_with_substitutions = data_with_substitutions.replace(
old_static_ref,
new_static_ref,
)
new_xblock.data = data_with_substitutions
store.update_item(new_xblock, request.user.id)
notices = _insert_static_files_into_downstream_xblock(new_xblock, user_clipboard.content.id, request)

return new_xblock, notices


def import_static_assets_for_library_sync(downstream_xblock: XBlock, lib_block: XBlock, request) -> StaticFileNotices:
"""
Import the static assets from the library xblock to the downstream xblock
through staged content. Also updates the OLX references to point to the new
locations of those assets in the downstream course.

Does not deal with permissions or REST stuff - do that before calling this.

Returns a summary of changes made to static files in the destination
course.
"""
if not lib_block.runtime.get_block_assets(lib_block, fetch_asset_data=False):
return StaticFileNotices()
if not content_staging_api:
raise RuntimeError("The required content_staging app is not installed")
staged_content = content_staging_api.stage_xblock_temporarily(lib_block, request.user.id, LIBRARY_SYNC_PURPOSE)
if not staged_content:
# expired/error/loading
return StaticFileNotices()

store = modulestore()
try:
with store.bulk_operations(downstream_xblock.context_key):
# Now handle static files that need to go into Files & Uploads.
# If the required files already exist, nothing will happen besides updating the olx.
notices = _insert_static_files_into_downstream_xblock(downstream_xblock, staged_content.id, request)
finally:
staged_content.delete()

return notices


def _fetch_and_set_upstream_link(
copied_from_block: str,
copied_from_version_num: int,
@@ -452,11 +496,11 @@ def _import_xml_node_to_parent(

if xblock_class.has_children and temp_xblock.children:
raise NotImplementedError("We don't yet support pasting XBlocks with children")
temp_xblock.parent = parent_key
if copied_from_block:
_fetch_and_set_upstream_link(copied_from_block, copied_from_version_num, temp_xblock, user)
# Save the XBlock into modulestore. We need to save the block and its parent for this to work:
new_xblock = store.update_item(temp_xblock, user.id, allow_not_found=True)
new_xblock.parent = parent_key
parent_xblock.children.append(new_xblock.location)
store.update_item(parent_xblock, user.id)

@@ -543,6 +587,9 @@ def _import_files_into_course(
if result is True:
new_files.append(file_data_obj.filename)
substitutions.update(substitution_for_file)
elif substitution_for_file:
# substitutions need to be made because OLX references to these files need to be updated
substitutions.update(substitution_for_file)
elif result is None:
pass # This file already exists; no action needed.
else:
@@ -613,8 +660,8 @@ def _import_file_into_course(
contentstore().save(content)
return True, {clipboard_file_path: f"static/{import_path}"}
elif current_file.content_digest == file_data_obj.md5_hash:
# The file already exists and matches exactly, so no action is needed
return None, {}
# The file already exists and matches exactly, so no action is needed except substitutions
return None, {clipboard_file_path: f"static/{import_path}"}
else:
# There is a conflict with some other file that has the same name.
return False, {}
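For orientation, the sketch below shows how the new import_static_assets_for_library_sync helper is intended to be called once a course block has been synced from its upstream library block. The caller name and the logging are illustrative assumptions; only the helper itself and the error_files field of the returned StaticFileNotices come from this change.

# Hypothetical caller -- a minimal sketch, not part of this change.
import logging

from cms.djangoapps.contentstore.helpers import import_static_assets_for_library_sync

log = logging.getLogger(__name__)


def copy_assets_after_library_sync(downstream_xblock, lib_block, request):
    """
    After syncing a course block from its upstream library block, copy the
    library block's static assets into the course and rewrite the OLX
    references in the downstream block to point at the copied assets.
    """
    notices = import_static_assets_for_library_sync(downstream_xblock, lib_block, request)
    # StaticFileNotices summarizes what happened in the destination course;
    # error_files lists assets that could not be imported.
    if notices.error_files:
        log.warning("Static assets failed to import during library sync: %s", notices.error_files)
    return notices
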
4 changes: 4 additions & 0 deletions cms/djangoapps/contentstore/rest_api/v1/serializers/home.py
@@ -50,6 +50,10 @@ class StudioHomeSerializer(serializers.Serializer):
child=serializers.CharField(),
allow_empty=True
)
allowed_organizations_for_libraries = serializers.ListSerializer(
child=serializers.CharField(),
allow_empty=True
)
archived_courses = CourseCommonSerializer(required=False, many=True)
can_access_advanced_settings = serializers.BooleanField()
can_create_organizations = serializers.BooleanField()
9 changes: 8 additions & 1 deletion cms/djangoapps/contentstore/rest_api/v1/views/home.py
@@ -5,6 +5,7 @@
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from organizations import api as org_api
from openedx.core.lib.api.view_utils import view_auth_classes

from ....utils import get_home_context, get_course_context, get_library_context
@@ -51,6 +52,7 @@ def get(self, request: Request):
"allow_to_create_new_org": true,
"allow_unicode_course_id": false,
"allowed_organizations": [],
"allowed_organizations_for_libraries": [],
"archived_courses": [],
"can_access_advanced_settings": true,
"can_create_organizations": true,
@@ -80,7 +82,12 @@ def get(self, request: Request):

home_context = get_home_context(request, True)
home_context.update({
'allow_to_create_new_org': settings.FEATURES.get('ENABLE_CREATOR_GROUP', True) and request.user.is_staff,
# 'allow_to_create_new_org' is actually about auto-creating organizations
# (e.g. when creating a course or library), so we add an additional test.
'allow_to_create_new_org': (
home_context['can_create_organizations'] and
org_api.is_autocreate_enabled()
),
'studio_name': settings.STUDIO_NAME,
'studio_short_name': settings.STUDIO_SHORT_NAME,
'studio_request_email': settings.FEATURES.get('STUDIO_REQUEST_EMAIL', ''),
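The reworked flag means 'allow_to_create_new_org' is true only when the user can create organizations and the organizations app has auto-creation enabled. A minimal test sketch of that behaviour follows; the test class, URL name, and patch target are assumptions for illustration, not part of this change.

# Hypothetical test sketch -- class name, URL name, and patch target are assumptions.
from unittest import mock

from django.urls import reverse
from rest_framework.test import APITestCase


class HomePageOrgCreationFlagTest(APITestCase):
    """Illustrates the intended semantics of 'allow_to_create_new_org'."""

    @mock.patch("organizations.api.is_autocreate_enabled", return_value=False)
    def test_flag_false_when_autocreate_disabled(self, _mock_autocreate):
        # Even a user who is allowed to create organizations should not be
        # offered "create new org" when auto-creation is turned off.
        response = self.client.get(reverse("cms.djangoapps.contentstore:v1:home"))  # URL name assumed
        assert response.data["allow_to_create_new_org"] is False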