1
0
Fork 0
mirror of https://github.com/HenkKalkwater/harbour-sailfin.git synced 2024-11-22 09:15:18 +00:00

Report playback progress and resume items

[Playback]: New: playback progress is reported to the Jellyfin server.
[Playback]: New: resume partly played items or start playing from the beginning if desired.

I also had to make some changes to the VideoPlayer, because the VideoHUD
got locked up when the player changed status from Buffering to Buffered
too quickly in succession, which occurs when trying to seek as soon as
the application is able to.
This commit is contained in:
Chris Josten 2020-10-01 21:45:34 +02:00
parent 7221fda1d5
commit c01fcdcb54
13 changed files with 195 additions and 27 deletions

View file

@ -33,6 +33,14 @@ function timeToText(time) {
return hours + ":" + (minutes < 10 ? "0" : "") + minutes + ":" + (seconds < 10 ? "0" : "")+ seconds return hours + ":" + (minutes < 10 ? "0" : "") + minutes + ":" + (seconds < 10 ? "0" : "")+ seconds
} }
function msToTicks(ms) {
return ms * 10000;
}
function ticksToMs(ticks) {
return ticks / 10000;
}
function ticksToText(ticks) { function ticksToText(ticks) {
return timeToText(ticks / 10000); return timeToText(ticks / 10000);
} }

View file

@ -54,7 +54,7 @@ Rectangle {
anchors.fill: backgroundImage anchors.fill: backgroundImage
source: backgroundImage source: backgroundImage
opacity: dimmedOpacity opacity: dimmedOpacity
radius: 50 radius: 100
} }
Image { Image {

View file

@ -23,7 +23,8 @@ import Sailfish.Silica 1.0
Column { Column {
property alias imageSource : playImage.source property alias imageSource : playImage.source
property real imageAspectRatio: 1.0 property real imageAspectRatio: 1.0
signal playPressed() property real playProgress: 0.0
signal playPressed(bool startFromBeginning)
spacing: Theme.paddingLarge spacing: Theme.paddingLarge
BackgroundItem { BackgroundItem {
@ -42,7 +43,16 @@ Column {
anchors.centerIn: parent anchors.centerIn: parent
highlighted: parent.highlighted highlighted: parent.highlighted
} }
onClicked: playPressed() Rectangle {
anchors {
left: parent.left
bottom: parent.bottom
}
height: Theme.paddingMedium
color: Theme.highlightColor
width: parent.width * playProgress
}
onClicked: playPressed(false)
} }
Row { Row {
anchors { anchors {
@ -52,6 +62,12 @@ Column {
rightMargin: Theme.horizontalPageMargin rightMargin: Theme.horizontalPageMargin
} }
spacing: Theme.paddingMedium spacing: Theme.paddingMedium
IconButton {
id: playFromBeginning
icon.source: "image://theme/icon-m-backup"
visible: playProgress > 0
onClicked: playPressed(true)
}
IconButton { IconButton {
id: favouriteButton id: favouriteButton
icon.source: "image://theme/icon-m-favorite" icon.source: "image://theme/icon-m-favorite"

View file

@ -23,6 +23,7 @@ import Sailfish.Silica 1.0
import nl.netsoj.chris.Jellyfin 1.0 import nl.netsoj.chris.Jellyfin 1.0
import "videoplayer" import "videoplayer"
import "../"
/** /**
* A videoPlayer for Jellyfin videos * A videoPlayer for Jellyfin videos
@ -38,6 +39,7 @@ SilicaItem {
readonly property bool hudVisible: !hud.hidden || player.error !== MediaPlayer.NoError readonly property bool hudVisible: !hud.hidden || player.error !== MediaPlayer.NoError
property alias audioTrack: mediaSource.audioIndex property alias audioTrack: mediaSource.audioIndex
property alias subtitleTrack: mediaSource.subtitleIndex property alias subtitleTrack: mediaSource.subtitleIndex
property int startTicks: 0
// Force a Light on Dark theme since I doubt that there are persons who are willing to watch a Video // Force a Light on Dark theme since I doubt that there are persons who are willing to watch a Video
// on a white background. // on a white background.
@ -49,7 +51,6 @@ SilicaItem {
color: "black" color: "black"
} }
MediaSource { MediaSource {
id: mediaSource id: mediaSource
apiClient: ApiClient apiClient: ApiClient
@ -59,11 +60,16 @@ SilicaItem {
onStreamUrlChanged: { onStreamUrlChanged: {
if (mediaSource.streamUrl != "") { if (mediaSource.streamUrl != "") {
player.source = streamUrl player.source = streamUrl
//mediaPlayer.play()
} }
} }
} }
Connections {
target: player
onPlaybackStateChanged: mediaSource.state = player.playbackState
onPositionChanged: mediaSource.position = Utils.msToTicks(player.position)
}
VideoOutput { VideoOutput {
id: videoOutput id: videoOutput
@ -99,6 +105,18 @@ SilicaItem {
function stop() { function stop() {
player.stop() player.stop()
player.source = "" //player.source = ""
}
Connections {
id: playerReadyToSeek
target: player
onPlaybackStateChanged: {
if (startTicks > 0 && player.playbackState == MediaPlayer.PlayingState) {
console.log("Seeking to " + Utils.ticksToMs(startTicks))
player.seek(Utils.ticksToMs(startTicks))
playerReadyToSeek.enabled = false // Only seek the first time this property changes
}
}
} }
} }

View file

@ -70,6 +70,7 @@ Column {
onTracksChanged: { onTracksChanged: {
audioModel.clear() audioModel.clear()
subtitleModel.clear() subtitleModel.clear()
if (typeof tracks === "undefined") return
for(var i = 0; i < tracks.length; i++) { for(var i = 0; i < tracks.length; i++) {
var track = tracks[i]; var track = tracks[i];
switch(track.Type) { switch(track.Type) {

View file

@ -41,6 +41,10 @@ Rectangle {
color: Theme.errorColor color: Theme.errorColor
text: { text: {
switch(player.error) { switch(player.error) {
case MediaPlayer.NoError:
//: Just to be complete if the application shows a video playback error when there's no error.
qsTr("No error");
break;
case MediaPlayer.ResourceError: case MediaPlayer.ResourceError:
//: Video playback error: out of resources //: Video playback error: out of resources
qsTr("Resource allocation error") qsTr("Resource allocation error")

View file

@ -65,7 +65,11 @@ Item {
id: wakeupArea id: wakeupArea
enabled: true enabled: true
anchors.fill: parent anchors.fill: parent
onClicked: hidden ? videoHud.show(true) : videoHud.hide(true) onClicked: {
hidden ? videoHud.show(true) : videoHud.hide(true)
console.log("Trying")
}
} }
BusyIndicator { BusyIndicator {
@ -156,18 +160,21 @@ Item {
} }
function show(manual) { function show(manual) {
_manuallyActivated = manual
if (manual) { if (manual) {
_manuallyActivated = true
inactivityTimer.restart() inactivityTimer.restart()
} else { } else {
_manuallyActivated = false inactivityTimer.stop()
} }
opacity = 1 opacity = 1
} }
function hide(manual) { function hide(manual) {
// Don't hide if the user decided on their own to show the hud // Don't hide if the user decided on their own to show the hud
if (!manual && _manuallyActivated) return; //if (!manual && _manuallyActivated) return;
// Don't give in to the user if they want to hide the hud while it was forced upon them
/*if (!_manuallyActivated && manual) return;
_manuallyActivated = false;*/
opacity = 0 opacity = 0
} }

View file

@ -199,7 +199,7 @@ Page {
+ "/Images/Primary?maxHeight=" + height + "&tag=" + model.imageTags["Primary"] + "/Images/Primary?maxHeight=" + height + "&tag=" + model.imageTags["Primary"]
: ""*/ : ""*/
landscape: !Utils.usePortraitCover(model.type) landscape: !Utils.usePortraitCover(model.type)
progress: model.userData.PlayedPercentage / 100 progress: (typeof model.userData !== "undefined") ? model.userData.PlayedPercentage / 100 : 0.0
onClicked: { onClicked: {
pageStack.push(Utils.getPageUrl(model.mediaType, model.type), {"itemId": model.id}) pageStack.push(Utils.getPageUrl(model.mediaType, model.type), {"itemId": model.id})

View file

@ -33,6 +33,7 @@ Page {
property var itemData property var itemData
property int audioTrack property int audioTrack
property int subtitleTrack property int subtitleTrack
property int startTicks: 0
allowedOrientations: Orientation.All allowedOrientations: Orientation.All
showNavigationIndicator: videoPlayer.hudVisible showNavigationIndicator: videoPlayer.hudVisible
@ -45,6 +46,7 @@ Page {
title: itemData.Name title: itemData.Name
audioTrack: videoPage.audioTrack audioTrack: videoPage.audioTrack
subtitleTrack: videoPage.subtitleTrack subtitleTrack: videoPage.subtitleTrack
startTicks: videoPage.startTicks
onLandscapeChanged: { onLandscapeChanged: {
console.log("Is landscape: " + landscape) console.log("Is landscape: " + landscape)

View file

@ -47,10 +47,14 @@ BaseDetailPage {
PlayToolbar { PlayToolbar {
imageSource: Utils.itemImageUrl(ApiClient.baseUrl, itemData, "Primary", {"maxWidth": parent.width}) imageSource: Utils.itemImageUrl(ApiClient.baseUrl, itemData, "Primary", {"maxWidth": parent.width})
imageAspectRatio: itemData.PrimaryImageAspectRatio imageAspectRatio: itemData.PrimaryImageAspectRatio || 1.0
playProgress: itemData.UserData.PlayedPercentage / 100
onPlayPressed: pageStack.push(Qt.resolvedUrl("../VideoPage.qml"), onPlayPressed: pageStack.push(Qt.resolvedUrl("../VideoPage.qml"),
{"itemId": itemId, "itemData": itemData, "audioTrack": trackSelector.audioTrack, {"itemId": itemId, "itemData": itemData,
"subtitleTrack": trackSelector.subtitleTrack }) "audioTrack": trackSelector.audioTrack,
"subtitleTrack": trackSelector.subtitleTrack,
"startTicks": startFromBeginning ? 0.0
: itemData.UserData.PlaybackPositionTicks })
width: parent.width width: parent.width
} }
@ -66,7 +70,7 @@ BaseDetailPage {
PlainLabel { PlainLabel {
id: overviewText id: overviewText
text: itemData.Overview text: itemData.Overview || qsTr("No overview available")
font.pixelSize: Theme.fontSizeSmall font.pixelSize: Theme.fontSizeSmall
color: Theme.secondaryHighlightColor color: Theme.secondaryHighlightColor
} }

View file

@ -23,13 +23,15 @@ namespace Jellyfin {
MediaSource::MediaSource(QObject *parent) MediaSource::MediaSource(QObject *parent)
: QObject(parent) { : QObject(parent) {
m_updateTimer.setInterval(10000); // 10 seconds
m_updateTimer.setSingleShot(false);
connect(&m_updateTimer, &QTimer::timeout, this, &MediaSource::updatePlaybackInfo);
} }
void MediaSource::fetchStreamUrl() { void MediaSource::fetchStreamUrl() {
QUrlQuery params; QUrlQuery params;
params.addQueryItem("UserId", m_apiClient->userId()); params.addQueryItem("UserId", m_apiClient->userId());
params.addQueryItem("StartTimeTicks", "0"); params.addQueryItem("StartTimeTicks", QString::number(m_position));
params.addQueryItem("IsPlayback", "true"); params.addQueryItem("IsPlayback", "true");
params.addQueryItem("AutoOpenLiveStream", this->m_autoOpen ? "true" : "false"); params.addQueryItem("AutoOpenLiveStream", this->m_autoOpen ? "true" : "false");
params.addQueryItem("MediaSourceId", this->m_itemId); params.addQueryItem("MediaSourceId", this->m_itemId);
@ -51,7 +53,7 @@ void MediaSource::fetchStreamUrl() {
this->m_streamUrl = this->m_apiClient->baseUrl() this->m_streamUrl = this->m_apiClient->baseUrl()
+ mediaSources[0].toObject()["TranscodingUrl"].toString(); + mediaSources[0].toObject()["TranscodingUrl"].toString();
this->m_playMethod = Transcode;
emit this->streamUrlChanged(this->m_streamUrl); emit this->streamUrlChanged(this->m_streamUrl);
qDebug() << "Found stream url: " << this->m_streamUrl; qDebug() << "Found stream url: " << this->m_streamUrl;
} }
@ -79,16 +81,83 @@ void MediaSource::setStreamUrl(const QString &streamUrl) {
emit streamUrlChanged(streamUrl); emit streamUrlChanged(streamUrl);
} }
void MediaSource::play() { void MediaSource::setPosition(qint64 position) {
//todo: playback reporting if (position == 0 && m_position != 0) {
// Save the old position when stop gets called. The QMediaPlayer will try to set
// position to 0 when stopped, but we don't want to report that to Jellyfin. We
// want the old position.
m_stopPosition = m_position;
}
m_position = position;
emit positionChanged(position);
} }
void MediaSource::pause() { void MediaSource::setState(QMediaPlayer::State newState) {
//todo: playback reporting if (m_state == newState) return;
if (m_state == QMediaPlayer::StoppedState) {
// We're transitioning from stopped to either playing or paused.
// Set up the recurring timer
m_updateTimer.start();
postPlaybackInfo(Started);
} else if (newState == QMediaPlayer::StoppedState) {
// We've stopped playing the media. Post a stop signal.
m_updateTimer.stop();
postPlaybackInfo(Stopped);
} else {
postPlaybackInfo(Progress);
} }
void MediaSource::stop() {
//todo: playback reporting m_state = newState;
emit this->stateChanged(newState);
}
void MediaSource::updatePlaybackInfo() {
postPlaybackInfo(Progress);
}
void MediaSource::postPlaybackInfo(PlaybackInfoType type) {
QJsonObject root;
root["ItemId"] = m_itemId;
root["SessionId"] = m_playSessionId;
switch(type) {
case Started: // FALLTHROUGH
case Progress:
root["IsPaused"] = m_state != QMediaPlayer::PlayingState;
root["IsMuted"] = false;
root["AudioStreamIndex"] = m_audioIndex;
root["SubtitleStreamIndex"] = m_subtitleIndex;
root["PlayMethod"] = QVariant::fromValue(m_playMethod).toString();
root["PositionTicks"] = m_position;
break;
case Stopped:
root["PositionTicks"] = m_stopPosition;
break;
}
QString path;
switch (type) {
case Started:
path = "/Sessions/Playing";
break;
case Progress:
path = "/Sessions/Playing/Progress";
break;
case Stopped:
path = "/Sessions/Playing/Stopped";
break;
}
QNetworkReply *rep = m_apiClient->post(path, QJsonDocument(root));
connect(rep, &QNetworkReply::finished, this, [rep](){
rep->deleteLater();
});
m_apiClient->setDefaultErrorHandler(rep);
} }
} }

View file

@ -23,6 +23,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#include <QJsonArray> #include <QJsonArray>
#include <QJsonObject> #include <QJsonObject>
#include <QObject> #include <QObject>
#include <QVariant>
#include <QUrlQuery> #include <QUrlQuery>
@ -36,6 +37,13 @@ namespace Jellyfin {
class MediaSource : public QObject { class MediaSource : public QObject {
Q_OBJECT Q_OBJECT
public: public:
enum PlayMethod {
Transcode,
Stream,
DirectPlay
};
Q_ENUM(PlayMethod)
explicit MediaSource(QObject *parent = nullptr); explicit MediaSource(QObject *parent = nullptr);
Q_PROPERTY(ApiClient *apiClient MEMBER m_apiClient) Q_PROPERTY(ApiClient *apiClient MEMBER m_apiClient)
Q_PROPERTY(QString itemId READ itemId WRITE setItemId NOTIFY itemIdChanged) Q_PROPERTY(QString itemId READ itemId WRITE setItemId NOTIFY itemIdChanged)
@ -43,10 +51,17 @@ public:
Q_PROPERTY(bool autoOpen MEMBER m_autoOpen NOTIFY autoOpenChanged) Q_PROPERTY(bool autoOpen MEMBER m_autoOpen NOTIFY autoOpenChanged)
Q_PROPERTY(int audioIndex MEMBER m_audioIndex NOTIFY audioIndexChanged) Q_PROPERTY(int audioIndex MEMBER m_audioIndex NOTIFY audioIndexChanged)
Q_PROPERTY(int subtitleIndex MEMBER m_subtitleIndex NOTIFY subtitleIndexChanged) Q_PROPERTY(int subtitleIndex MEMBER m_subtitleIndex NOTIFY subtitleIndexChanged)
Q_PROPERTY(qint64 position MEMBER m_position WRITE setPosition NOTIFY positionChanged)
Q_PROPERTY(QMediaPlayer::State state READ state WRITE setState NOTIFY stateChanged)
QString itemId() const { return m_itemId; } QString itemId() const { return m_itemId; }
void setItemId(const QString &newItemId); void setItemId(const QString &newItemId);
QMediaPlayer::State state() const { return m_state; }
void setState(QMediaPlayer::State newState);
void setPosition(qint64 position);
QString streamUrl() const { return m_streamUrl; } QString streamUrl() const { return m_streamUrl; }
signals: signals:
void itemIdChanged(const QString &newItemId); void itemIdChanged(const QString &newItemId);
@ -54,19 +69,24 @@ signals:
void autoOpenChanged(bool autoOpen); void autoOpenChanged(bool autoOpen);
void audioIndexChanged(int audioIndex); void audioIndexChanged(int audioIndex);
void subtitleIndexChanged(int subtitleIndex); void subtitleIndexChanged(int subtitleIndex);
void positionChanged(qint64 position);
void stateChanged(QMediaPlayer::State state);
public slots: public slots:
void play(); void updatePlaybackInfo();
void pause();
void stop();
private: private:
QTimer m_updateTimer;
ApiClient *m_apiClient = nullptr; ApiClient *m_apiClient = nullptr;
QString m_itemId; QString m_itemId;
QString m_streamUrl; QString m_streamUrl;
QString m_playSessionId; QString m_playSessionId;
int m_audioIndex = 0; int m_audioIndex = 0;
int m_subtitleIndex = -1; int m_subtitleIndex = -1;
qint64 m_position = 0;
qint64 m_stopPosition = 0;
PlayMethod m_playMethod;
QMediaPlayer::State m_state = QMediaPlayer::StoppedState;
/** /**
* @brief Whether to automatically open the livestream of the item; * @brief Whether to automatically open the livestream of the item;
@ -75,6 +95,16 @@ private:
void fetchStreamUrl(); void fetchStreamUrl();
void setStreamUrl(const QString &streamUrl); void setStreamUrl(const QString &streamUrl);
// Factor to multiply with when converting from milliseconds to ticks.
const int MS_TICK_FACTOR = 10000;
enum PlaybackInfoType { Started, Stopped, Progress };
/**
* @brief Posts the playback information
*/
void postPlaybackInfo(PlaybackInfoType type);
}; };
} }

View file

@ -125,6 +125,10 @@
<source>Overview</source> <source>Overview</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message>
<source>No overview available</source>
<translation type="unfinished"></translation>
</message>
</context> </context>
<context> <context>
<name>FilmPage</name> <name>FilmPage</name>
@ -318,6 +322,11 @@
<extracomment>Button to retry loading a video after a failure</extracomment> <extracomment>Button to retry loading a video after a failure</extracomment>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message>
<source>No error</source>
<extracomment>Just to be complete if the application shows a video playback error when there&apos;s no error.</extracomment>
<translation type="unfinished"></translation>
</message>
</context> </context>
<context> <context>
<name>VideoPage</name> <name>VideoPage</name>