Merge branch 'release/4.1.0' into develop

Author: Chocobozzz
Date: 2022-03-08 11:28:32 +01:00
Commit: 7b51ede977
GPG Key ID: 583A612D890159BE
15 changed files with 132 additions and 44 deletions

View File

@@ -8,7 +8,7 @@ runs:
     - name: Setup system dependencies
       shell: bash
       run: |
-        sudo apt-get install postgresql-client-common redis-tools parallel
+        sudo apt-get install postgresql-client-common redis-tools parallel libimage-exiftool-perl
         wget --quiet --no-check-certificate "https://download.cpy.re/ffmpeg/ffmpeg-release-4.3.1-64bit-static.tar.xz"
         tar xf ffmpeg-release-4.3.1-64bit-static.tar.xz
         mkdir -p $HOME/bin

View File

@@ -1,5 +1,23 @@
 # Changelog
 
+## v4.1.1
+
+### Security
+
+* Strip EXIF data when processing images
+
+### Docker
+
+* Fix videos import by installing python 3
+* Install `git` package (may be needed to install some plugins)
+
+### Bug fixes
+
+* Fix error when updating a live
+* Fix performance regression when rendering HTML and feeds
+* Fix player stuck by HTTP request error
+
 ## v4.1.0
 
 ### IMPORTANT NOTES

View File

@@ -1,6 +1,6 @@
 {
   "name": "peertube-client",
-  "version": "4.1.0",
+  "version": "4.1.1",
   "private": true,
   "license": "AGPL-3.0",
   "author": {

View File

@@ -1,7 +1,7 @@
 {
   "name": "peertube",
   "description": "PeerTube, an ActivityPub-federated video streaming platform using P2P directly in your web browser.",
-  "version": "4.1.0",
+  "version": "4.1.1",
   "private": true,
   "licence": "AGPL-3.0",
   "engines": {

View File

@@ -118,6 +118,8 @@ async function autoResize (options: {
   const sourceIsPortrait = sourceImage.getWidth() < sourceImage.getHeight()
   const destIsPortraitOrSquare = newSize.width <= newSize.height
 
+  removeExif(sourceImage)
+
   if (sourceIsPortrait && !destIsPortraitOrSquare) {
     const baseImage = sourceImage.cloneQuiet().cover(newSize.width, newSize.height)
       .color([ { apply: 'shade', params: [ 50 ] } ])
@@ -144,6 +146,7 @@ function skipProcessing (options: {
   const { sourceImage, newSize, imageBytes, inputExt, outputExt } = options
   const { width, height } = newSize
 
+  if (hasExif(sourceImage)) return false
   if (sourceImage.getWidth() > width || sourceImage.getHeight() > height) return false
   if (inputExt !== outputExt) return false
@@ -154,3 +157,11 @@ function skipProcessing (options: {
 
   return imageBytes <= 15 * kB
 }
+
+function hasExif (image: Jimp) {
+  return !!(image.bitmap as any).exifBuffer
+}
+
+function removeExif (image: Jimp) {
+  (image.bitmap as any).exifBuffer = null
+}
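For readers skimming the diff: the new helpers rely on Jimp exposing the parsed EXIF payload on `bitmap.exifBuffer`, so clearing it before the image is re-encoded is what drops the metadata. A minimal standalone sketch of that idea, assuming only that behaviour; the `stripExif` helper and the file paths are illustrative, not part of this commit:

import Jimp from 'jimp'

// Decode the image, clear the EXIF buffer Jimp attached to the bitmap,
// then re-encode it to the output path without the metadata block.
async function stripExif (inputPath: string, outputPath: string) {
  const image = await Jimp.read(inputPath)

  const bitmap = image.bitmap as any
  if (bitmap.exifBuffer) bitmap.exifBuffer = null // same trick as removeExif() above

  await image.writeAsync(outputPath)
}

// Example usage with placeholder paths
stripExif('exif.jpg', 'stripped.jpg').catch(err => console.error('Failed to strip EXIF', err))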

View File

@@ -7,8 +7,13 @@ const sanitizeHtml = require('sanitize-html')
 const markdownItEmoji = require('markdown-it-emoji/light')
 const MarkdownItClass = require('markdown-it')
 
-const markdownItWithHTML = new MarkdownItClass('default', { linkify: true, breaks: true, html: true })
-const markdownItWithoutHTML = new MarkdownItClass('default', { linkify: false, breaks: true, html: false })
+const markdownItForSafeHtml = new MarkdownItClass('default', { linkify: true, breaks: true, html: true })
+  .enable(TEXT_WITH_HTML_RULES)
+  .use(markdownItEmoji)
+
+const markdownItForPlainText = new MarkdownItClass('default', { linkify: false, breaks: true, html: false })
+  .use(markdownItEmoji)
+  .use(plainTextPlugin)
 
 const toSafeHtml = (text: string) => {
   if (!text) return ''
@@ -17,9 +22,7 @@ const toSafeHtml = (text: string) => {
   const textWithLineFeed = text.replace(/<br.?\/?>/g, '\r\n')
 
   // Convert possible markdown (emojis, emphasis and lists) to html
-  const html = markdownItWithHTML.enable(TEXT_WITH_HTML_RULES)
-    .use(markdownItEmoji)
-    .render(textWithLineFeed)
+  const html = markdownItForSafeHtml.render(textWithLineFeed)
 
   // Convert to safe Html
   return sanitizeHtml(html, defaultSanitizeOptions)
@@ -28,12 +31,10 @@ const toSafeHtml = (text: string) => {
 const mdToOneLinePlainText = (text: string) => {
   if (!text) return ''
 
-  markdownItWithoutHTML.use(markdownItEmoji)
-    .use(plainTextPlugin)
-    .render(text)
+  markdownItForPlainText.render(text)
 
   // Convert to safe Html
-  return sanitizeHtml(markdownItWithoutHTML.plainText, textOnlySanitizeOptions)
+  return sanitizeHtml(markdownItForPlainText.plainText, textOnlySanitizeOptions)
 }
 
 // ---------------------------------------------------------------------------
@@ -47,30 +48,38 @@ export {
 
 // Thanks: https://github.com/wavesheep/markdown-it-plain-text
 function plainTextPlugin (markdownIt: any) {
-  let lastSeparator = ''
-
   function plainTextRule (state: any) {
     const text = scan(state.tokens)
 
-    markdownIt.plainText = text.replace(/\s+/g, ' ')
+    markdownIt.plainText = text
   }
 
   function scan (tokens: any[]) {
+    let lastSeparator = ''
     let text = ''
 
-    for (const token of tokens) {
-      if (token.children !== null) {
-        text += scan(token.children)
-        continue
-      }
-
-      if (token.type === 'list_item_close') {
+    function buildSeparator (token: any) {
+      if (token.type === 'list_item_close') {
         lastSeparator = ', '
-      } else if (token.type.endsWith('_close')) {
-        lastSeparator = ' '
-      } else if (token.content) {
-        text += lastSeparator
-        text += token.content
+      }
+
+      if (token.tag === 'br' || token.type === 'paragraph_close') {
+        lastSeparator = ' '
+      }
+    }
+
+    for (const token of tokens) {
+      buildSeparator(token)
+
+      if (token.type !== 'inline') continue
+
+      for (const child of token.children) {
+        buildSeparator(child)
+
+        if (!child.content) continue
+
+        text += lastSeparator + child.content
+        lastSeparator = ''
      }
     }
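For context on the markdown change: the reworked plugin only collects text from `inline` tokens and their children, and `render()` fills `markdownIt.plainText` as a side effect. A condensed, self-contained sketch of that flow, using only markdown-it; the `core.ruler.push` registration and the sample input are assumptions for illustration and sit outside the visible hunk:

const MarkdownIt = require('markdown-it')

// Join the contents of inline child tokens, deriving separators from
// list item, paragraph and <br> tokens, and expose the result on the instance.
function plainTextPlugin (markdownIt: any) {
  function plainTextRule (state: any) {
    let lastSeparator = ''
    let text = ''

    function buildSeparator (token: any) {
      if (token.type === 'list_item_close') lastSeparator = ', '
      if (token.tag === 'br' || token.type === 'paragraph_close') lastSeparator = ' '
    }

    for (const token of state.tokens) {
      buildSeparator(token)
      if (token.type !== 'inline') continue

      for (const child of token.children) {
        buildSeparator(child)
        if (!child.content) continue

        text += lastSeparator + child.content
        lastSeparator = ''
      }
    }

    markdownIt.plainText = text
  }

  // Assumed registration point: run after markdown-it's core rules
  markdownIt.core.ruler.push('plainText', plainTextRule)
}

const md = new MarkdownIt('default', { linkify: false, breaks: true, html: false }).use(plainTextPlugin)
md.render('#déconversion\n#newage\n#histoire') // render() populates md.plainText
console.log(md.plainText)                      // '#déconversion #newage #histoire'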

View File

@@ -37,7 +37,7 @@ const createTranscodingValidator = [
   // Prefer using job info table instead of video state because before 4.0 failed transcoded video were stuck in "TO_TRANSCODE" state
   const info = await VideoJobInfoModel.load(video.id)
 
-  if (info && info.pendingTranscode !== 0) {
+  if (info && info.pendingTranscode > 0) {
     return res.fail({
       status: HttpStatusCode.CONFLICT_409,
       message: 'This video is already being transcoded'

Binary file changed (not shown): 87 KiB before, 58 KiB after.

server/tests/fixtures/exif.jpg (new binary file, 11 KiB, not shown)

server/tests/fixtures/exif.png (new binary file, 21 KiB, not shown)

View File

@@ -4,6 +4,7 @@ import 'mocha'
 import { expect } from 'chai'
 import { readFile, remove } from 'fs-extra'
 import { join } from 'path'
+import { execPromise } from '@server/helpers/core-utils'
 import { buildAbsoluteFixturePath, root } from '@shared/core-utils'
 import { processImage } from '../../../server/helpers/image-utils'
@@ -20,40 +21,77 @@ async function checkBuffers (path1: string, path2: string, equals: boolean) {
   }
 }
 
+async function hasTitleExif (path: string) {
+  const result = JSON.parse(await execPromise(`exiftool -json ${path}`))
+
+  return result[0]?.Title === 'should be removed'
+}
+
 describe('Image helpers', function () {
   const imageDestDir = join(root(), 'test-images')
-  const imageDest = join(imageDestDir, 'test.jpg')
+
+  const imageDestJPG = join(imageDestDir, 'test.jpg')
+  const imageDestPNG = join(imageDestDir, 'test.png')
+
   const thumbnailSize = { width: 223, height: 122 }
 
   it('Should skip processing if the source image is okay', async function () {
     const input = buildAbsoluteFixturePath('thumbnail.jpg')
-    await processImage(input, imageDest, thumbnailSize, true)
+    await processImage(input, imageDestJPG, thumbnailSize, true)
 
-    await checkBuffers(input, imageDest, true)
+    await checkBuffers(input, imageDestJPG, true)
   })
 
   it('Should not skip processing if the source image does not have the appropriate extension', async function () {
     const input = buildAbsoluteFixturePath('thumbnail.png')
-    await processImage(input, imageDest, thumbnailSize, true)
+    await processImage(input, imageDestJPG, thumbnailSize, true)
 
-    await checkBuffers(input, imageDest, false)
+    await checkBuffers(input, imageDestJPG, false)
   })
 
   it('Should not skip processing if the source image does not have the appropriate size', async function () {
     const input = buildAbsoluteFixturePath('preview.jpg')
-    await processImage(input, imageDest, thumbnailSize, true)
+    await processImage(input, imageDestJPG, thumbnailSize, true)
 
-    await checkBuffers(input, imageDest, false)
+    await checkBuffers(input, imageDestJPG, false)
   })
 
   it('Should not skip processing if the source image does not have the appropriate size', async function () {
     const input = buildAbsoluteFixturePath('thumbnail-big.jpg')
-    await processImage(input, imageDest, thumbnailSize, true)
+    await processImage(input, imageDestJPG, thumbnailSize, true)
 
-    await checkBuffers(input, imageDest, false)
+    await checkBuffers(input, imageDestJPG, false)
+  })
+
+  it('Should strip exif for a jpg file that can not be copied', async function () {
+    const input = buildAbsoluteFixturePath('exif.jpg')
+    expect(await hasTitleExif(input)).to.be.true
+
+    await processImage(input, imageDestJPG, { width: 100, height: 100 }, true)
+    await checkBuffers(input, imageDestJPG, false)
+
+    expect(await hasTitleExif(imageDestJPG)).to.be.false
+  })
+
+  it('Should strip exif for a jpg file that could be copied', async function () {
+    const input = buildAbsoluteFixturePath('exif.jpg')
+    expect(await hasTitleExif(input)).to.be.true
+
+    await processImage(input, imageDestJPG, thumbnailSize, true)
+    await checkBuffers(input, imageDestJPG, false)
+
+    expect(await hasTitleExif(imageDestJPG)).to.be.false
+  })
+
+  it('Should strip exif for png', async function () {
+    const input = buildAbsoluteFixturePath('exif.png')
+    expect(await hasTitleExif(input)).to.be.true
+
+    await processImage(input, imageDestPNG, thumbnailSize, true)
+    expect(await hasTitleExif(imageDestPNG)).to.be.false
   })
 
   after(async function () {
-    await remove(imageDest)
+    await remove(imageDestDir)
   })
 })

View File

@@ -30,5 +30,11 @@ describe('Markdown helpers', function () {
 
       expect(result).to.equal('Hello coucou')
     })
+
+    it('Should convert tags to plain text', function () {
+      const result = mdToOneLinePlainText(`#déconversion\n#newage\n#histoire`)
+
+      expect(result).to.equal('#déconversion #newage #histoire')
+    })
   })
 })

View File

@@ -25,21 +25,21 @@ async function expectLogDoesNotContain (server: PeerTubeServer, str: string) {
   expect(content.toString()).to.not.contain(str)
 }
 
-async function testImage (url: string, imageName: string, imagePath: string, extension = '.jpg') {
+async function testImage (url: string, imageName: string, imageHTTPPath: string, extension = '.jpg') {
   const res = await makeGetRequest({
     url,
-    path: imagePath,
+    path: imageHTTPPath,
     expectedStatus: HttpStatusCode.OK_200
   })
 
   const body = res.body
   const data = await readFile(join(root(), 'server', 'tests', 'fixtures', imageName + extension))
 
-  const minLength = body.length - ((30 * body.length) / 100)
-  const maxLength = body.length + ((30 * body.length) / 100)
+  const minLength = data.length - ((40 * data.length) / 100)
+  const maxLength = data.length + ((40 * data.length) / 100)
 
-  expect(data.length).to.be.above(minLength, 'the generated image is way smaller than the recorded fixture')
-  expect(data.length).to.be.below(maxLength, 'the generated image is way larger than the recorded fixture')
+  expect(body.length).to.be.above(minLength, 'the generated image is way smaller than the recorded fixture')
+  expect(body.length).to.be.below(maxLength, 'the generated image is way larger than the recorded fixture')
 }
 
 async function testFileExistsOrNot (server: PeerTubeServer, directory: string, filePath: string, exist: boolean) {
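To make the new tolerance concrete, a quick worked example in the same terms (the 11 KiB figure is simply the size of the new exif.jpg fixture, used here for illustration):

// data = recorded fixture read from disk, body = image served over HTTP
const fixtureLength = 11 * 1024                                // 11264 bytes
const minLength = fixtureLength - ((40 * fixtureLength) / 100) // 6758.4 bytes
const maxLength = fixtureLength + ((40 * fixtureLength) / 100) // 15769.6 bytes
// the assertion now accepts any served body whose length falls between minLength and maxLength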

View File

@@ -31,6 +31,12 @@ $ sudo docker run -p 9444:9000 chocobozzz/s3-ninja
 $ sudo docker run -p 10389:10389 chocobozzz/docker-test-openldap
 ```
 
+Ensure you also have these commands:
+
+```
+$ exiftool --help
+```
+
 ### Test
 
 To run all test suites:
@@ -39,7 +45,7 @@ To run all test suites:
 $ npm run test # See scripts/test.sh to run a particular suite
 ```
 
-Most of tests can be runned using:
+Most tests can be run using:
 
 ```bash
 TS_NODE_TRANSPILE_ONLY=true npm run mocha -- --timeout 30000 --exit -r ts-node/register -r tsconfig-paths/register --bail server/tests/api/videos/video-transcoder.ts

View File

@@ -2,7 +2,7 @@ FROM node:14-bullseye-slim
 
 # Install dependencies
 RUN apt update \
-    && apt install -y --no-install-recommends openssl ffmpeg python3 ca-certificates gnupg gosu build-essential curl \
+    && apt install -y --no-install-recommends openssl ffmpeg python3 ca-certificates gnupg gosu build-essential curl git \
     && gosu nobody true \
     && rm /var/lib/apt/lists/* -fR