Compare commits

...

6 Commits

Author SHA1 Message Date
Markos Gogoulos   94c646fdb8   update metadata only, on API call   2024-09-20 19:26:13 +03:00
Markos Gogoulos   d665058b80   speed up docker start   2024-09-20 13:02:00 +03:00
Markos Gogoulos   986c7d1074   add validation to files uploading to avoid client side pushing arbitrary data (#1057)   2024-09-20 13:01:33 +03:00
thau0x01   1adee8c156   fix #943 (#1052)   2024-09-20 12:53:56 +03:00
makerduck   ffd7a52863   Fix postgres role output (#1029)   2024-09-20 12:52:50 +03:00
Kyle Maas   c5047d8df8   Fix null bug in More Options button (#913)   2023-11-14 09:24:05 +02:00
10 changed files with 41 additions and 16 deletions

View File

@@ -28,7 +28,8 @@ else
 fi
 # We should do this only for folders that have a different owner, since it is an expensive operation
-find /home/mediacms.io/ ! \( -user www-data -group $TARGET_GID \) -exec chown www-data:$TARGET_GID {} +
+# Also ignoring .git folder to fix this issue https://github.com/mediacms-io/mediacms/issues/934
+find /home/mediacms.io/mediacms ! \( -path "*.git*" \) -exec chown www-data:$TARGET_GID {} +
 chmod +x /home/mediacms.io/mediacms/deploy/docker/start.sh /home/mediacms.io/mediacms/deploy/docker/prestart.sh
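
The new find call narrows the chown to /home/mediacms.io/mediacms and prunes anything under .git, which is what the referenced issue #934 and the "speed up docker start" commit are about. A rough Python rendering of that predicate, as an illustrative sketch only (not part of the commit; the www-data/TARGET_GID handling below is assumed from the surrounding script):

import os
import shutil

# Assumed to come from the container environment, as in the entrypoint script;
# 33 is the conventional www-data gid on Debian-based images and is only a fallback here.
TARGET_GID = int(os.environ.get("TARGET_GID", "33"))

for root, dirs, files in os.walk("/home/mediacms.io/mediacms"):
    dirs[:] = [d for d in dirs if d != ".git"]  # prune .git, mirroring ! \( -path "*.git*" \)
    shutil.chown(root, user="www-data", group=TARGET_GID)
    for name in files:
        shutil.chown(os.path.join(root, name), user="www-data", group=TARGET_GID)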

View File

@@ -57,7 +57,7 @@ services:
       POSTGRES_DB: mediacms
       TZ: Europe/London
     healthcheck:
-      test: ["CMD-SHELL", "pg_isready", "--host=db", "--dbname=$POSTGRES_DB", "--username=$POSTGRES_USER"]
+      test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}", "--host=db", "--dbname=$POSTGRES_DB", "--username=$POSTGRES_USER"]
       interval: 10s
       timeout: 5s
       retries: 5
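
The same one-line change repeats in every compose variant below. With CMD-SHELL, only the first string after it is executed by the shell; the old form therefore ran a bare pg_isready, with the trailing "--host/--dbname/--username" items passed to sh as unused positional parameters, and a bare pg_isready defaults to the current OS user, which is the usual source of the "role does not exist" noise in the postgres logs that #1029 fixes. The doubled $$ stops compose from interpolating the variables itself, so POSTGRES_DB and POSTGRES_USER are expanded inside the db container at check time. Roughly what ends up running, as an illustrative sketch (the fallback values are assumptions, not taken from the diff):

import os
import subprocess

# Illustrative sketch only: approximately the command the db container's shell runs for the
# new healthcheck once compose has turned $$ back into $. The real values come from the
# db service's environment.
db = os.environ.get("POSTGRES_DB", "mediacms")
user = os.environ.get("POSTGRES_USER", "mediacms")

result = subprocess.run(["pg_isready", "-d", db, "-U", user], capture_output=True, text=True)
print(result.returncode, result.stdout.strip())  # 0 and "... accepting connections" when healthy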

View File

@@ -78,7 +78,7 @@ services:
       POSTGRES_DB: mediacms
       TZ: Europe/London
     healthcheck:
-      test: ["CMD-SHELL", "pg_isready", "--host=db", "--dbname=$POSTGRES_DB", "--username=$POSTGRES_USER"]
+      test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}", "--host=db", "--dbname=$POSTGRES_DB", "--username=$POSTGRES_USER"]
       interval: 10s
       timeout: 5s
       retries: 5

View File

@@ -80,7 +80,7 @@ services:
       POSTGRES_DB: mediacms
       TZ: Europe/London
     healthcheck:
-      test: ["CMD-SHELL", "pg_isready", "--host=db", "--dbname=$POSTGRES_DB", "--username=$POSTGRES_USER"]
+      test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}", "--host=db", "--dbname=$POSTGRES_DB", "--username=$POSTGRES_USER"]
       interval: 10s
       timeout: 5s
       retries: 5

View File

@@ -100,7 +100,7 @@ services:
       POSTGRES_DB: mediacms
       TZ: Europe/London
     healthcheck:
-      test: ["CMD-SHELL", "pg_isready", "--host=db", "--dbname=$POSTGRES_DB", "--username=$POSTGRES_USER"]
+      test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}", "--host=db", "--dbname=$POSTGRES_DB", "--username=$POSTGRES_USER"]
       interval: 30s
       timeout: 10s
       retries: 5

View File

@@ -76,7 +76,7 @@ services:
       POSTGRES_DB: mediacms
       TZ: Europe/London
     healthcheck:
-      test: ["CMD-SHELL", "pg_isready", "--host=db", "--dbname=$POSTGRES_DB", "--username=$POSTGRES_USER"]
+      test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}", "--host=db", "--dbname=$POSTGRES_DB", "--username=$POSTGRES_USER"]
       interval: 30s
       timeout: 10s
       retries: 5

View File

@@ -72,7 +72,7 @@ services:
       POSTGRES_DB: mediacms
       TZ: Europe/London
     healthcheck:
-      test: ["CMD-SHELL", "pg_isready", "--host=db", "--dbname=$POSTGRES_DB", "--username=$POSTGRES_USER"]
+      test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}", "--host=db", "--dbname=$POSTGRES_DB", "--username=$POSTGRES_USER"]
       interval: 10s
       timeout: 5s
       retries: 5

View File

@@ -598,14 +598,15 @@ class MediaDetail(APIView):
         media = self.get_object(friendly_token)
         if isinstance(media, Response):
             return media
         serializer = MediaSerializer(media, data=request.data, context={"request": request})
         if serializer.is_valid():
-            if request.data.get('media_file'):
-                media_file = request.data["media_file"]
-                serializer.save(user=request.user, media_file=media_file)
-            else:
-                serializer.save(user=request.user)
+            serializer.save(user=request.user)
+            # no need to update the media file itself, only the metadata
+            #if request.data.get('media_file'):
+            #    media_file = request.data["media_file"]
+            #    serializer.save(user=request.user, media_file=media_file)
+            #else:
+            #    serializer.save(user=request.user)
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
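
With the save call above no longer forwarding media_file, a PUT to the media detail endpoint updates metadata only; any media_file field in the payload is ignored rather than re-saved. A hypothetical client sketch (the base URL, token and friendly_token are illustrative placeholders, not taken from the diff):

import requests

# Hypothetical values for illustration; <base>/api/v1/media/<friendly_token> is the assumed shape.
url = "https://mediacms.example.com/api/v1/media/abc123def"
headers = {"Authorization": "Token YOUR_API_TOKEN"}

payload = {"title": "Updated title", "description": "Updated description"}
response = requests.put(url, data=payload, headers=headers)
print(response.status_code)  # 201 on success, per the view above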

View File

@@ -22,7 +22,7 @@ function downloadOptions(mediaData, allowDownload) {
     if (Object.keys(encodingsInfo[k]).length) {
       for (g in encodingsInfo[k]) {
         if (encodingsInfo[k].hasOwnProperty(g)) {
-          if ('success' === encodingsInfo[k][g].status && 100 === encodingsInfo[k][g].progress) {
+          if ('success' === encodingsInfo[k][g].status && 100 === encodingsInfo[k][g].progress && null !== encodingsInfo[k][g].url) {
             options[encodingsInfo[k][g].title] = {
               text: k + ' - ' + g.toUpperCase() + ' (' + encodingsInfo[k][g].size + ')',
               link: formatInnerLink(encodingsInfo[k][g].url, site.url),

View File

@@ -1,4 +1,7 @@
+import re
 import shutil
+import os
+import uuid
 from io import StringIO
 from os.path import join
@@ -7,16 +10,31 @@ from django.conf import settings
 from . import utils


+def is_valid_uuid_format(uuid_string):
+    pattern = re.compile(r'^[a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[89ab][a-f0-9]{3}-[a-f0-9]{12}$', re.IGNORECASE)
+    return bool(pattern.match(uuid_string))
+
+
 class BaseFineUploader(object):
     def __init__(self, data, *args, **kwargs):
         self.data = data
+        self.total_filesize = data.get("qqtotalfilesize")
         self.filename = data.get("qqfilename")
         self.uuid = data.get("qquuid")
+        if not is_valid_uuid_format(self.uuid):
+            # something nasty client side could be happening here
+            # generate new uuid to ensure this is uuid
+            # not sure if this will work with the chunked uploads though
+            self.uuid = uuid.uuid4()
+        self.filename = os.path.basename(self.filename)
+        # avoid possibility of passing a fake path here
         self.file = data.get("qqfile")
         self.storage_class = settings.FILE_STORAGE
         self.real_path = None

     @property
     def finished(self):
         return self.real_path is not None
@@ -50,7 +68,11 @@ class ChunkedFineUploader(BaseFineUploader):
         self.total_parts = data.get("qqtotalparts")
         if not isinstance(self.total_parts, int):
             self.total_parts = 1
-        self.part_index = data.get("qqpartindex")
+        qqpartindex = data.get("qqpartindex")
+        if not isinstance(qqpartindex, int):
+            # something nasty client side could be happening here
+            qqpartindex = 0
+        self.part_index = qqpartindex

     @property
     def chunks_path(self):
@@ -75,6 +97,7 @@ class ChunkedFineUploader(BaseFineUploader):
     def combine_chunks(self):
         # implement the same behaviour.
         self.real_path = self.storage.save(self._full_file_path, StringIO())
+
         with self.storage.open(self.real_path, "wb") as final_file:
             for i in range(self.total_parts):
                 part = join(self.chunks_path, str(i))
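
To make the upload validation above concrete, a hypothetical interactive sketch (the uploader.fineuploader import path is assumed): a well-formed version-4 UUID passes, anything else makes the uploader fall back to a fresh uuid4(), and os.path.basename strips any directory components a client might smuggle into qqfilename.

import os
import uuid

from uploader.fineuploader import is_valid_uuid_format  # assumed import path

print(is_valid_uuid_format(str(uuid.uuid4())))   # True: matches the 8-4-4-4-12 version-4 pattern
print(is_valid_uuid_format("../../etc/passwd"))  # False: the uploader replaces it with uuid.uuid4()
print(is_valid_uuid_format("not-a-uuid"))        # False
print(os.path.basename("../../etc/passwd"))      # 'passwd': fake paths in qqfilename are stripped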