Add user-configurable playback settings
* PlaybackManager has been updated to work around limitations in QtMultimedia.
* PlaybackManager now sends the DeviceProfile to the server when determining the playback URL. This makes the Jellyfin server send back information about transcoding.
* The DeviceProfile type has been changed from a QJsonObject into the DTO generated from the OpenAPI description.
* A settings page has been added on SailfishOS that allows the user to configure the PlaybackManager to their wishes.
* The DebugInfo page on SailfishOS now persists its settings (closes #8)
parent 64ad37707c
commit 6bfe783bec
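As the commit message above describes, the PlaybackManager now posts the client's DeviceProfile to the Jellyfin server when it resolves a playback URL, and the server answers with per-source playback options (direct play, direct stream, or a transcoding URL). The sketch below is only an illustration of that round trip against the Jellyfin /Items/{itemId}/PlaybackInfo endpoint, written with plain QNetworkAccessManager instead of the generated JellyfinQt loader and DTO classes that the diff itself uses; the base URL, access token, item id and the heavily trimmed device profile are placeholders, not values from this commit.

// Illustrative sketch only: shows the PlaybackInfo round trip that the updated
// PlaybackManager performs, using plain QNetworkAccessManager instead of the
// generated JellyfinQt loaders. Base URL, access token and item id are fake.
#include <QCoreApplication>
#include <QDebug>
#include <QJsonArray>
#include <QJsonDocument>
#include <QJsonObject>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QNetworkRequest>
#include <QUrl>

int main(int argc, char *argv[]) {
    QCoreApplication app(argc, argv);
    QNetworkAccessManager nam;

    const QString baseUrl = "https://jellyfin.example.org";      // placeholder
    const QString itemId  = "00000000000000000000000000000000";  // placeholder
    const QString token   = "<access token>";                    // placeholder

    // A heavily trimmed device profile; the real one is built in
    // Model::DeviceProfile::generateProfile() from the codecs the device supports.
    QJsonObject deviceProfile {
        {"Name", "Example client"},
        {"MaxStreamingBitrate", 5000000},
        {"TranscodingProfiles", QJsonArray{ QJsonObject{
            {"Container", "ts"}, {"Type", "Video"},
            {"VideoCodec", "h264"}, {"AudioCodec", "aac"},
            {"Context", "Streaming"}, {"Protocol", "hls"}
        }}}
    };
    QJsonObject body {
        {"DeviceProfile", deviceProfile},
        {"EnableDirectPlay", true},
        {"EnableDirectStream", true},
        {"EnableTranscoding", true}
    };

    QNetworkRequest req(QUrl(baseUrl + "/Items/" + itemId + "/PlaybackInfo"));
    req.setHeader(QNetworkRequest::ContentTypeHeader, "application/json");
    req.setRawHeader("X-Emby-Token", token.toUtf8());

    QNetworkReply *reply = nam.post(req, QJsonDocument(body).toJson());
    QObject::connect(reply, &QNetworkReply::finished, [&]() {
        const QJsonObject response = QJsonDocument::fromJson(reply->readAll()).object();
        // The server answers with one entry per media source, telling the client
        // whether it may direct play, direct stream, or has to transcode.
        for (const QJsonValue &value : response["MediaSources"].toArray()) {
            const QJsonObject source = value.toObject();
            qDebug() << source["Name"].toString()
                     << "direct play:" << source["SupportsDirectPlay"].toBool()
                     << "direct stream:" << source["SupportsDirectStream"].toBool()
                     << "transcode:" << source["SupportsTranscoding"].toBool()
                     << "transcoding url:" << source["TranscodingUrl"].toString();
        }
        reply->deleteLater();
        app.quit();
    });
    return app.exec();
}

In the commit itself this round trip goes through Jellyfin::Loader::HTTP::GetPostedPlaybackInfoLoader and DTO::PlaybackInfoDto, and PlaybackManager::handlePlaybackInfoResponse() uses the same per-source flags to pick between direct play, direct stream and transcoding, as the hunks below show.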
@@ -96,7 +96,7 @@ public:
     Q_PROPERTY(QString baseUrl READ baseUrl WRITE setBaseUrl NOTIFY baseUrlChanged)
     Q_PROPERTY(bool authenticated READ authenticated WRITE setAuthenticated NOTIFY authenticatedChanged)
     Q_PROPERTY(QString userId READ userId NOTIFY userIdChanged)
-    Q_PROPERTY(QJsonObject deviceProfile READ deviceProfile NOTIFY deviceProfileChanged)
+    Q_PROPERTY(QJsonObject deviceProfile READ deviceProfileJson NOTIFY deviceProfileChanged)
     Q_PROPERTY(QString version READ version)
     Q_PROPERTY(EventBus *eventbus READ eventbus FINAL)
     Q_PROPERTY(Jellyfin::WebSocket *websocket READ websocket FINAL)
@@ -139,8 +139,9 @@ public:
      */
     QVariantList supportedCommands() const ;
     void setSupportedCommands(QVariantList newSupportedCommands);
-    const QJsonObject &deviceProfile() const;
-    const QJsonObject &playbackDeviceProfile() const;
+    const QJsonObject deviceProfileJson() const;
+    QSharedPointer<DTO::DeviceProfile> deviceProfile() const;
+    const QJsonObject clientCapabilities() const;
     /**
      * @brief Retrieves the authentication token. Null QString if not authenticated.
      * @note This is not the full authentication header, just the token.
@@ -30,10 +30,12 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
 #include <QtMultimedia/QMediaPlayer>
 
+#include "../dto/deviceprofile.h"
+
 namespace Jellyfin {
 namespace Model {
 namespace DeviceProfile {
-QJsonObject generateProfile();
+DTO::DeviceProfile generateProfile();
 // Transport
 bool supportsHls();
 
@@ -34,7 +34,7 @@ namespace ViewModel {
 class Settings : public QObjectSettingsWrapper {
     Q_OBJECT
     Q_PROPERTY(bool allowTranscoding READ allowTranscoding WRITE setAllowTranscoding NOTIFY allowTranscodingChanged)
-    Q_PROPERTY(int maxBitRate READ maxBitRate WRITE setMaxBitRate NOTIFY maxBitRateChanged)
+    Q_PROPERTY(int maxStreamingBitRate READ maxStreamingBitRate WRITE setMaxStreamingBitRate NOTIFY maxStreamingBitRateChanged)
 public:
     explicit Settings(ApiClient *apiClient);
     virtual ~Settings();
@@ -42,14 +42,14 @@ public:
     bool allowTranscoding() const;
     void setAllowTranscoding(bool allowTranscoding);
 
-    int maxBitRate() const;
-    void setMaxBitRate(int newMaxBitRate);
+    int maxStreamingBitRate() const;
+    void setMaxStreamingBitRate(int newMaxBitRate);
 signals:
     void allowTranscodingChanged(bool newAllowTranscoding);
-    void maxBitRateChanged(int newMaxBitRate);
+    void maxStreamingBitRateChanged(int newMaxBitRate);
 private:
     bool m_allowTranscoding = true;
-    int m_maxBitRate = 5000000;
+    int m_maxStreamingBitRate = 5000000;
 };
 
 
@@ -19,11 +19,15 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
 #include "JellyfinQt/apiclient.h"
 
+#include <QSharedPointer>
+
+#include "JellyfinQt/dto/clientcapabilitiesdto.h"
 #include "JellyfinQt/support/jsonconv.h"
+#include "JellyfinQt/viewmodel/settings.h"
 #include "JellyfinQt/websocket.h"
 
 
 
 namespace Jellyfin {
 
 class ApiClientPrivate {
@@ -45,8 +49,8 @@ public:
     QString userId;
 
     bool online = true;
-    QJsonObject deviceProfile;
-    QJsonObject playbackDeviceProfile;
+    QSharedPointer<DTO::DeviceProfile> deviceProfile;
+    QSharedPointer<DTO::ClientCapabilitiesDto> clientCapabilities;
     QVariantList supportedCommands;
 
     bool authenticated = false;
@@ -70,6 +74,9 @@ ApiClient::ApiClient(QObject *parent)
     connect(d->credManager, &CredentialsManager::usersListed, this, &ApiClient::credManagerUsersListed);
     connect(d->credManager, &CredentialsManager::tokenRetrieved, this, &ApiClient::credManagerTokenRetrieved);
     generateDeviceProfile();
+    connect(d->settings, &ViewModel::Settings::maxStreamingBitRateChanged, this, [d](qint32 newBitrate){
+        d->deviceProfile->setMaxStreamingBitrate(newBitrate);
+    });
 }
 
 ApiClient::~ApiClient() {
@@ -149,13 +156,18 @@ void ApiClient::setSupportedCommands(QVariantList newSupportedCommands) {
     d->supportedCommands = newSupportedCommands;
     emit supportedCommandsChanged();
 }
-const QJsonObject &ApiClient::deviceProfile() const {
+QSharedPointer<DTO::DeviceProfile> ApiClient::deviceProfile() const {
     Q_D(const ApiClient);
     return d->deviceProfile;
 }
-const QJsonObject &ApiClient::playbackDeviceProfile() const {
+
+const QJsonObject ApiClient::deviceProfileJson() const {
     Q_D(const ApiClient);
-    return d->playbackDeviceProfile;
+    return d->deviceProfile->toJson();
 }
+const QJsonObject ApiClient::clientCapabilities() const {
+    Q_D(const ApiClient);
+    return d->clientCapabilities->toJson();
+}
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // BASE HTTP METHODS //
@@ -367,26 +379,7 @@ void ApiClient::deleteSession() {
 
 void ApiClient::postCapabilities() {
     Q_D(const ApiClient);
-    QJsonObject capabilities;
-    QList<DTO::GeneralCommandType> supportedCommands;
-    supportedCommands.reserve(d->supportedCommands.size());
-    for (int i = 0; i < d->supportedCommands.size(); i++) {
-        if (d->supportedCommands[i].canConvert<DTO::GeneralCommandType>()) {
-            supportedCommands.append(d->supportedCommands[i].value<DTO::GeneralCommandType>());
-        }
-    }
-    QList<int> foo = {1, 2, 3};
-    qDebug() << Support::toJsonValue<int>(3713);
-    qDebug() << Support::toJsonValue<QList<int>>(foo);
-    capabilities["SupportedCommands"] = Support::toJsonValue<QList<DTO::GeneralCommandType>>(supportedCommands);
-    capabilities["SupportsPersistentIdentifier"] = true;
-    capabilities["SupportsMediaControl"] = false;
-    capabilities["SupportsSync"] = false;
-    capabilities["SupportsContentUploading"] = false;
-    capabilities["AppStoreUrl"] = "https://chris.netsoj.nl/projects/harbour-sailfin";
-    capabilities["IconUrl"] = "https://chris.netsoj.nl/static/img/logo.png";
-    capabilities["DeviceProfile"] = d->deviceProfile;
-    QNetworkReply *rep = post("/Sessions/Capabilities/Full", QJsonDocument(capabilities));
+    QNetworkReply *rep = post("/Sessions/Capabilities/Full", QJsonDocument(d->clientCapabilities->toJson()));
     setDefaultErrorHandler(rep);
 }
 
@@ -397,18 +390,33 @@ QString ApiClient::downloadUrl(const QString &itemId) const {
 
 void ApiClient::generateDeviceProfile() {
     Q_D(ApiClient);
-    QJsonObject root = Model::DeviceProfile::generateProfile();
-    d->playbackDeviceProfile = QJsonObject(root);
-    root["Name"] = d->deviceName;
-    root["Id"] = d->deviceId;
-    root["FriendlyName"] = QSysInfo::prettyProductName();
-    QJsonArray playableMediaTypes;
-    playableMediaTypes.append("Audio");
-    playableMediaTypes.append("Video");
-    playableMediaTypes.append("Photo");
-    root["PlayableMediaTypes"] = playableMediaTypes;
+    QSharedPointer<DTO::DeviceProfile> deviceProfile = QSharedPointer<DTO::DeviceProfile>::create(Model::DeviceProfile::generateProfile());
+    deviceProfile->setName(d->deviceName);
+    deviceProfile->setJellyfinId(d->deviceId);
+    deviceProfile->setFriendlyName(QSysInfo::prettyProductName());
+    deviceProfile->setMaxStreamingBitrate(d->settings->maxStreamingBitRate());
+    d->deviceProfile = deviceProfile;
 
-    d->deviceProfile = root;
+    QList<DTO::GeneralCommandType> supportedCommands;
+    supportedCommands.reserve(d->supportedCommands.size());
+    for (int i = 0; i < d->supportedCommands.size(); i++) {
+        if (d->supportedCommands[i].canConvert<DTO::GeneralCommandType>()) {
+            supportedCommands.append(d->supportedCommands[i].value<DTO::GeneralCommandType>());
+        }
+    }
+
+    QSharedPointer<DTO::ClientCapabilitiesDto> clientCapabilities = QSharedPointer<DTO::ClientCapabilitiesDto>::create();
+    clientCapabilities->setPlayableMediaTypes({"Audio", "Video", "Photo"});
+    clientCapabilities->setDeviceProfile(deviceProfile);
+    clientCapabilities->setSupportedCommands(supportedCommands);
+    clientCapabilities->setAppStoreUrl("https://chris.netsoj.nl/projects/harbour-sailfin");
+    clientCapabilities->setIconUrl("https://chris.netsoj.nl/static/img/logo.png");
+    clientCapabilities->setSupportsPersistentIdentifier(true);
+    clientCapabilities->setSupportsSync(false);
+    clientCapabilities->setSupportsMediaControl(false);
+    clientCapabilities->setSupportsContentUploading(false);
+
+    d->clientCapabilities = clientCapabilities;
     emit deviceProfileChanged();
 }
 
@@ -22,6 +22,19 @@
 namespace Jellyfin {
 namespace Model {
 
+DTO::ProfileCondition createCondition(DTO::ProfileConditionValue property,
+                                      DTO::ProfileConditionType condition,
+                                      const QString &value,
+                                      bool isRequired = true) {
+    DTO::ProfileCondition result;
+    result.setProperty(property);
+    result.setCondition(condition);
+    result.setValue(value);
+    result.setIsRequired(isRequired);
+
+    return result;
+}
+
 bool DeviceProfile::supportsHls() {
     return true;
 }
@@ -43,9 +56,9 @@ int DeviceProfile::maxStreamingBitrate() {
     return 5000000;
 }
 
-QJsonObject DeviceProfile::generateProfile() {
+DTO::DeviceProfile DeviceProfile::generateProfile() {
     using JsonPair = QPair<QString, QJsonValue>;
-    QJsonObject profile;
+    DTO::DeviceProfile profile;
 
     QStringList audioCodes = {
         "aac",
@@ -78,161 +91,155 @@ QJsonObject DeviceProfile::generateProfile() {
         videoAudioCodecs.append("mp3");
         hlsVideoAudioCodecs.append("mp3");
     }
     videoAudioCodecs.append("aac");
     hlsVideoAudioCodecs.append("aac");
 
-    QJsonArray codecProfiles = {};
-    codecProfiles.append(QJsonObject {
-        JsonPair("Codec", "aac"),
-        JsonPair("Conditions", QJsonArray {
-            QJsonObject {
-                JsonPair("Property", "IsSecondaryAudio"),
-                JsonPair("Condition", "Equals"),
-                JsonPair("Value", "false"),
-                JsonPair("IsRequired", false)
-            }
-        }),
-        JsonPair("Type", "VideoAudio")
+    using CondVal = DTO::ProfileConditionValue;
+    using Condition = DTO::ProfileConditionType;
+
+
+    // AAC
+    DTO::CodecProfile codecProfile1;
+    codecProfile1.setCodec("aac");
+    QList<DTO::ProfileCondition> codecProfile1Conditions;
+    codecProfile1Conditions.append(createCondition(CondVal::IsSecondaryAudio,
+                                                   Condition::Equals,
+                                                   "false",
+                                                   false));
+    codecProfile1.setConditions(codecProfile1Conditions);
+    codecProfile1.setType(DTO::CodecType::VideoAudio);
+
+
+    DTO::CodecProfile codecProfile2;
+    codecProfile2.setCodec("h264");
+    codecProfile2.setConditions({
+        createCondition(CondVal::IsAnamorphic,
+                        Condition::NotEquals,
+                        "true", false),
+        createCondition(CondVal::VideoProfile,
+                        Condition::EqualsAny,
+                        "baseline|constrained baseline", false), //"high|main|baseline|constrained baseline"
+        createCondition(CondVal::VideoLevel,
+                        Condition::LessThanEqual,
+                        "51", false),
+        createCondition(CondVal::IsInterlaced,
+                        Condition::NotEquals,
+                        "true", false)
     });
-    codecProfiles.append(QJsonObject {
-        JsonPair("Codec", "h264"),
-        JsonPair("Conditions", QJsonArray {
-            QJsonObject {
-                JsonPair("Property", "IsAnamorphic"),
-                JsonPair("Condition", "NotEquals"),
-                JsonPair("Value", "true"),
-                JsonPair("IsRequired", false)
-            },
-            QJsonObject {
-                JsonPair("Property", "VideoProfile"),
-                JsonPair("Condition", "EqualsAny"),
-                JsonPair("Value", "baseline|constrained baseline"), //"high|main|baseline|constrained baseline"),
-                JsonPair("IsRequired", false),
-            },
-            QJsonObject {
-                JsonPair("Property", "VideoLevel"),
-                JsonPair("Condition", "LessThanEqual"),
-                JsonPair("Value", "51"),
-                JsonPair("IsRequired", false)
-            },
-            QJsonObject {
-                JsonPair("Property", "IsInterlaced"),
-                JsonPair("Condition", "NotEquals"),
-                JsonPair("Value", "true"),
-                JsonPair("IsRequired", false)
-            }
-        }),
-        JsonPair("Type", "Video")
-    });
-
-    QJsonArray transcodingProfiles = {};
-
+    codecProfile2.setType(DTO::CodecType::Video);
+    QList<DTO::CodecProfile> codecProfiles = {
+        codecProfile1,
+        codecProfile2
+    };
     // Hard coded nr 1:
-    QJsonObject transcoding1;
-    transcoding1["AudioCodec"] = "aac";
-    transcoding1["BreakOnNonKeyFrames"] =true;
-    transcoding1["Container"] = "ts";
-    transcoding1["Context"] = "Streaming";
-    transcoding1["MaxAudioChannels"] = "2";
-    transcoding1["MinSegments"] = "1";
-    transcoding1["Protocol"] = "hls";
-    transcoding1["Type"] = "Audio";
-    transcodingProfiles.append(transcoding1);
-
+    DTO::TranscodingProfile transcoding1;
+    transcoding1.setAudioCodec("aac");
+    transcoding1.setBreakOnNonKeyFrames(true);
+    transcoding1.setContainer("ts");
+    transcoding1.setContext(DTO::EncodingContext::Streaming);
+    transcoding1.setMaxAudioChannels("2");
+    transcoding1.setMinSegments(1);
+    transcoding1.setProtocol("hls");
+    transcoding1.setType(DTO::DlnaProfileType::Audio);
     // Hard code nr 2
-    transcodingProfiles.append(QJsonObject({
-        JsonPair("AudioCodec", "mp3,aac"),
-        JsonPair("BreakOnNonKeyFrames", true),
-        JsonPair("Container", "ts"),
-        JsonPair("Context", "Streaming"),
-        JsonPair("MaxAudioChannels", "2"),
-        JsonPair("MinSegments", 1),
-        JsonPair("Protocol", "hls"),
-        JsonPair("Type", "Video"),
-        JsonPair("VideoCodec", "h264")
-    }));
+    DTO::TranscodingProfile transcoding2;
+    transcoding2.setAudioCodec("mp3,aac");
+    transcoding2.setBreakOnNonKeyFrames(true);
+    transcoding2.setContainer("ts");
+    transcoding2.setContext(DTO::EncodingContext::Streaming);
+    transcoding2.setMaxAudioChannels("2");
+    transcoding2.setMinSegments(1);
+    transcoding2.setProtocol("hls");
+    transcoding2.setType(DTO::DlnaProfileType::Video);
+    transcoding2.setVideoCodec("h264");
 
     // Fallback
-    transcodingProfiles.append(QJsonObject {
-        JsonPair("Container", "mp4"),
-        JsonPair("Type", "Video"),
-        JsonPair("AudioCodec", videoAudioCodecs.join(',')),
-        JsonPair("VideoCodec", "h264"),
-        JsonPair("Context", "Static"),
-        JsonPair("Protocol", "http")
-    });
+    DTO::TranscodingProfile transcoding3;
+    transcoding3.setContainer("mp4");
+    transcoding3.setType(DTO::DlnaProfileType::Video);
+    transcoding3.setAudioCodec(videoAudioCodecs.join(','));
+    transcoding3.setVideoCodec("h264");
+    transcoding3.setContext(DTO::EncodingContext::Static);
+    transcoding3.setProtocol("http");
+
+    QList<DTO::TranscodingProfile> transcodingProfiles = {
+        transcoding1, transcoding2, transcoding3
+    };
 
     if (supportsHls() && !hlsVideoAudioCodecs.isEmpty()) {
-        transcodingProfiles.append(QJsonObject {
-            JsonPair("Container", "ts"),
-            JsonPair("Type", "Video"),
-            JsonPair("AudioCodec", hlsVideoAudioCodecs.join(",")),
-            JsonPair("VideoCodec", hlsVideoCodecs.join(",")),
-            JsonPair("Context", "Streaming"),
-            JsonPair("Protocol", "hls"),
-            JsonPair("MaxAudioChannels", "2"),
-            JsonPair("MinSegments", "1"),
-            JsonPair("BreakOnNonKeyFrames", true)
-        });
+        DTO::TranscodingProfile transcoding4;
+        transcoding4.setContainer("ts");
+        transcoding4.setType(DTO::DlnaProfileType::Video);
+        transcoding4.setAudioCodec(hlsVideoAudioCodecs.join(','));
+        transcoding4.setVideoCodec(hlsVideoCodecs.join(','));
+        transcoding4.setContext(DTO::EncodingContext::Streaming);
+        transcoding4.setProtocol("hls");
+        transcoding4.setMaxAudioChannels("2");
+        transcoding4.setMinSegments(1);
+        transcoding4.setBreakOnNonKeyFrames(true);
+        transcodingProfiles.append(transcoding4);
     }
 
     // Response profiles (or whatever it actually does?)
-    QJsonArray responseProfiles = {};
-    responseProfiles.append(QJsonObject({
-        JsonPair("Type", "Video"),
-        JsonPair("Container", "m4v"),
-        JsonPair("MimeType", "video/mp4")
-    }));
+    DTO::ResponseProfile responseProfile1;
+    responseProfile1.setType(DTO::DlnaProfileType::Video);
+    responseProfile1.setContainer("m4v");
+    responseProfile1.setMimeType("video/mp4");
+    QList<DTO::ResponseProfile> responseProfiles = {
+        responseProfile1
+    };
 
     // Direct play profiles
     // Video
-    QJsonArray directPlayProfiles;
-    directPlayProfiles.append(QJsonObject {
-        JsonPair("Container", "mp4,m4v"),
-        JsonPair("Type", "Video"),
-        JsonPair("VideoCodec", mp4VideoCodecs.join(',')),
-        JsonPair("AudioCodec", videoAudioCodecs.join(','))
-    });
-    directPlayProfiles.append(QJsonObject {
-        JsonPair("Container", "mkv"),
-        JsonPair("Type", "Video"),
-        JsonPair("VideoCodec", mp4VideoCodecs.join(',')),
-        JsonPair("AudioCodec", videoAudioCodecs.join(','))
-    });
+    DTO::DirectPlayProfile directPlayProfile1;
+    directPlayProfile1.setContainer("mp4,m4v");
+    directPlayProfile1.setType(DTO::DlnaProfileType::Video);
+    directPlayProfile1.setVideoCodec(mp4VideoCodecs.join(','));
+    directPlayProfile1.setAudioCodec(videoAudioCodecs.join(','));
+
+    DTO::DirectPlayProfile directPlayProfile2;
+    directPlayProfile2.setContainer("mkv");
+    directPlayProfile2.setType(DTO::DlnaProfileType::Video);
+    directPlayProfile2.setVideoCodec(mp4VideoCodecs.join(','));
+    directPlayProfile2.setAudioCodec(videoAudioCodecs.join(','));
+
+    QList<DTO::DirectPlayProfile> directPlayProfiles = {
+        directPlayProfile1, directPlayProfile2
+    };
     // Audio
     for (auto it = audioCodes.begin(); it != audioCodes.end(); it++) {
         if (*it == "mp2") {
-            directPlayProfiles.append(QJsonObject {
-                JsonPair("Container", "mp2,mp3"),
-                JsonPair("Type", "Audio"),
-                JsonPair("AudioCodec", "mp2")
-            });
+            DTO::DirectPlayProfile profile;
+            profile.setContainer("mp2,mp3");
+            profile.setType(DTO::DlnaProfileType::Audio);
+            profile.setAudioCodec("mp2");
+            directPlayProfiles.append(profile);
         } else if(*it == "mp3") {
-            directPlayProfiles.append(QJsonObject {
-                JsonPair("Container", "mp3"),
-                JsonPair("Type", "Audio"),
-                JsonPair("AudioCodec", "mp3")
-            });
+            DTO::DirectPlayProfile profile;
+            profile.setContainer("mp3");
+            profile.setType(DTO::DlnaProfileType::Audio);
+            profile.setAudioCodec("mp3");
+            directPlayProfiles.append(profile);
         } else if (*it == "webma") {
-            directPlayProfiles.append(QJsonObject {
-                JsonPair("Container", "webma,webm"),
-                JsonPair("Type", "Audio"),
-            });
+            DTO::DirectPlayProfile profile;
+            profile.setContainer("webma,webm");
+            profile.setType(DTO::DlnaProfileType::Audio);
+            directPlayProfiles.append(profile);
         } else {
-            directPlayProfiles.append(QJsonObject {
-                JsonPair("Container", *it),
-                JsonPair("Type", "Audio")
-            });
+            DTO::DirectPlayProfile profile;
+            profile.setContainer(*it);
+            profile.setType(DTO::DlnaProfileType::Audio);
+            directPlayProfiles.append(profile);
         }
     }
 
-    profile["CodecProfiles"] = codecProfiles;
-    profile["ContainerProfiles"] = QJsonArray();
-    profile["DirectPlayProfiles"] = directPlayProfiles;
-    profile["ResponseProfiles"] = responseProfiles;
-    profile["SubtitleProfiles"] = QJsonArray();
-    profile["TranscodingProfiles"] = transcodingProfiles;
-    profile["MaxStreamingBitrate"] = maxStreamingBitrate();
+    profile.setCodecProfiles(codecProfiles);
+    //profile["ContainerProfiles"] = QJsonArray();
+    profile.setDirectPlayProfiles(directPlayProfiles);
+    profile.setResponseProfiles(responseProfiles);
+    //profile["SubtitleProfiles"] = QJsonArray();
+    profile.setTranscodingProfiles(transcodingProfiles);
+    profile.setMaxStreamingBitrate(std::make_optional<qint32>(maxStreamingBitrate()));
     return profile;
 }
 
@@ -24,6 +24,7 @@
 
 // #include "JellyfinQt/DTO/dto.h"
 #include <JellyfinQt/dto/useritemdatadto.h>
+#include <JellyfinQt/viewmodel/settings.h>
 #include <utility>
 
 namespace Jellyfin {
@@ -89,6 +90,7 @@ void PlaybackManager::setItem(QSharedPointer<Model::Item> newItem) {
     emit hasPreviousChanged(m_queue->hasPrevious());
 
     if (m_apiClient == nullptr) {
+
         qWarning() << "apiClient is not set on this MediaSource instance! Aborting.";
         return;
     }
@@ -177,6 +179,8 @@ void PlaybackManager::updatePlaybackInfo() {
 
 void PlaybackManager::playItem(Item *item) {
     setItem(item->data());
+    emit hasNextChanged(m_queue->hasNext());
+    emit hasPreviousChanged(m_queue->hasPrevious());
 }
 
 void PlaybackManager::playItemInList(ItemModel *playlist, int index) {
@@ -186,6 +190,8 @@ void PlaybackManager::playItemInList(ItemModel *playlist, int index) {
     m_queueIndex = index;
     emit queueIndexChanged(m_queueIndex);
     setItem(playlist->itemAt(index));
+    emit hasNextChanged(m_queue->hasNext());
+    emit hasPreviousChanged(m_queue->hasPrevious());
 }
 
 void PlaybackManager::skipToItemIndex(int index) {
@@ -200,6 +206,8 @@ void PlaybackManager::skipToItemIndex(int index) {
         m_queue->play(index);
     }
     setItem(m_queue->currentItem());
+    emit hasNextChanged(m_queue->hasNext());
+    emit hasPreviousChanged(m_queue->hasPrevious());
 }
 
 void PlaybackManager::next() {
@@ -215,6 +223,8 @@ void PlaybackManager::next() {
         setItem(m_nextItem);
     }
     m_mediaPlayer->play();
+    emit hasNextChanged(m_queue->hasNext());
+    emit hasPreviousChanged(m_queue->hasPrevious());
 }
 
 void PlaybackManager::previous() {
@@ -227,6 +237,8 @@ void PlaybackManager::previous() {
     m_queue->previous();
     setItem(m_queue->currentItem());
     m_mediaPlayer->play();
+    emit hasNextChanged(m_queue->hasNext());
+    emit hasPreviousChanged(m_queue->hasPrevious());
 }
 
 void PlaybackManager::postPlaybackInfo(PlaybackInfoType type) {
@@ -285,21 +297,45 @@ void PlaybackManager::componentComplete() {
 void PlaybackManager::requestItemUrl(QSharedPointer<Model::Item> item) {
     ItemUrlLoader *loader = new Jellyfin::Loader::HTTP::GetPostedPlaybackInfoLoader(m_apiClient);
     Jellyfin::Loader::GetPostedPlaybackInfoParams params;
 
+
+    // Check if we'd prefer to transcode if the video file contains multiple audio tracks
+    // or if a subtitle track was selected.
+    // This has to be done due to the lack of support of selecting audio tracks within QtMultimedia
+    bool allowTranscoding = m_apiClient->settings()->allowTranscoding();
+    bool transcodePreferred = m_subtitleIndex > 0;
+    int audioTracks = 0;
+    const QList<DTO::MediaStream> &streams = item->mediaStreams();
+    for(int i = 0; i < streams.size(); i++) {
+        const DTO::MediaStream &stream = streams[i];
+        if (stream.type() == MediaStreamType::Audio) {
+            audioTracks++;
+        }
+    }
+    if (audioTracks > 1) {
+        transcodePreferred = true;
+    }
+
+    bool forceTranscoding = allowTranscoding && transcodePreferred;
+
+    QSharedPointer<DTO::PlaybackInfoDto> playbackInfo = QSharedPointer<DTO::PlaybackInfoDto>::create();
     params.setItemId(item->jellyfinId());
     params.setUserId(m_apiClient->userId());
-    params.setEnableDirectPlay(true);
-    params.setEnableDirectStream(true);
-    params.setEnableTranscoding(true);
-    params.setAudioStreamIndex(this->m_audioIndex);
-    params.setSubtitleStreamIndex(this->m_subtitleIndex);
+    playbackInfo->setEnableDirectPlay(true);
+    playbackInfo->setEnableDirectStream(!forceTranscoding);
+    playbackInfo->setEnableTranscoding(forceTranscoding || allowTranscoding);
+    playbackInfo->setAudioStreamIndex(this->m_audioIndex);
+    playbackInfo->setSubtitleStreamIndex(this->m_subtitleIndex);
+    playbackInfo->setDeviceProfile(m_apiClient->deviceProfile());
+    params.setBody(playbackInfo);
+
     loader->setParameters(params);
-    connect(loader, &ItemUrlLoader::ready, [this, loader, item] {
+    connect(loader, &ItemUrlLoader::ready, this, [this, loader, item] {
         DTO::PlaybackInfoResponse result = loader->result();
         handlePlaybackInfoResponse(item->jellyfinId(), item->mediaType(), result);
         loader->deleteLater();
     });
-    connect(loader, &ItemUrlLoader::error, [this, loader, item](QString message) {
+    connect(loader, &ItemUrlLoader::error, this, [this, loader, item](QString message) {
         onItemErrorReceived(item->jellyfinId(), message);
         loader->deleteLater();
     });
@@ -312,12 +348,43 @@ void PlaybackManager::handlePlaybackInfoResponse(QString itemId, QString mediaTy
     QUrl resultingUrl;
     QString playSession = response.playSessionId();
     PlayMethod playMethod = PlayMethod::EnumNotSet;
+    bool transcodingAllowed = m_apiClient->settings()->allowTranscoding();
+
+
+
     for (int i = 0; i < mediaSources.size(); i++) {
         const DTO::MediaSourceInfo &source = mediaSources.at(i);
+
+        // Check if we'd prefer to transcode if the video file contains multiple audio tracks
+        // or if a subtitle track was selected.
+        // This has to be done due to the lack of support of selecting audio tracks within QtMultimedia
+        bool transcodePreferred = false;
+        if (transcodingAllowed) {
+            transcodePreferred = m_subtitleIndex > 0;
+            int audioTracks = 0;
+            const QList<DTO::MediaStream> &streams = source.mediaStreams();
+            for (int i = 0; i < streams.size(); i++) {
+                DTO::MediaStream stream = streams[i];
+                if (stream.type() == MediaStreamType::Audio) {
+                    audioTracks++;
+                }
+            }
+            if (audioTracks > 1) {
+                transcodePreferred = true;
+            }
+        }
+
+
+        qDebug() << "Media source: " << source.name() << "\n"
+                 << "Prefer transcoding: " << transcodePreferred << "\n"
+                 << "DirectPlay supported: " << source.supportsDirectPlay() << "\n"
+                 << "DirectStream supported: " << source.supportsDirectStream() << "\n"
+                 << "Transcode supported: " << source.supportsTranscoding();
+
         if (source.supportsDirectPlay() && QFile::exists(source.path())) {
             resultingUrl = QUrl::fromLocalFile(source.path());
             playMethod = PlayMethod::DirectPlay;
-        } else if (source.supportsDirectStream()) {
+        } else if (source.supportsDirectStream() && !transcodePreferred) {
             if (mediaType == "Video") {
                 mediaType.append('s');
             }
@@ -329,7 +396,8 @@ void PlaybackManager::handlePlaybackInfoResponse(QString itemId, QString mediaTy
             resultingUrl = QUrl(m_apiClient->baseUrl() + "/" + mediaType + "/" + itemId
                     + "/stream." + source.container() + "?" + query.toString(QUrl::EncodeReserved));
             playMethod = PlayMethod::DirectStream;
-        } else if (source.supportsTranscoding()) {
+        } else if (source.supportsTranscoding() && !source.transcodingUrlNull() && transcodingAllowed) {
+            qDebug() << "Transcoding url: " << source.transcodingUrl();
             resultingUrl = QUrl(m_apiClient->baseUrl() + source.transcodingUrl());
             playMethod = PlayMethod::Transcode;
         } else {
@@ -341,6 +409,7 @@ void PlaybackManager::handlePlaybackInfoResponse(QString itemId, QString mediaTy
         qWarning() << "Could not find suitable media source for item " << itemId;
         onItemErrorReceived(itemId, tr("Cannot fetch stream URL"));
     } else {
+        emit playMethodChanged(playMethod);
         onItemUrlReceived(itemId, resultingUrl, playSession, playMethod);
     }
 }
@@ -43,13 +43,13 @@ void Settings::setAllowTranscoding(bool allowTranscoding) {
     emit allowTranscodingChanged(allowTranscoding);
 }
 
-int Settings::maxBitRate() const {
-    return m_maxBitRate;
+int Settings::maxStreamingBitRate() const {
+    return m_maxStreamingBitRate;
 }
 
-void Settings::setMaxBitRate(int newMaxBitRate) {
-    m_maxBitRate = newMaxBitRate;
-    emit maxBitRateChanged(newMaxBitRate);
+void Settings::setMaxStreamingBitRate(int newMaxBitRate) {
+    m_maxStreamingBitRate = newMaxBitRate;
+    emit maxStreamingBitRateChanged(newMaxBitRate);
 }
 
 } // NS ViewModel
@@ -34,6 +34,11 @@ Page {
     Column {
         id: content
        width: parent.width
+        CheckBox {
+            checked: ApiClient.settings.allowTranscoding
+            text: "allow transcoding"
+            onCheckedChanged: ApiClient.settings.allowTranscoding = checked
+        }
         Repeater {
             model: mediaLibraryModel
             Column {
@@ -69,12 +69,23 @@ SilicaItem {
     }
 
     Label {
+        readonly property string _playbackMethod: {
+            switch(manager.playMethod) {
+            case J.PlaybackManager.DirectPlay:
+                return"Direct Play"
+            case J.PlaybackManager.Transcoding:
+                return "Transcoding"
+            case J.PlaybackManager.DirectStream:
+                return "Direct Stream"
+            default:
+                return "Unknown playback method"
+            }
+        }
         anchors.fill: parent
         anchors.margins: Theme.horizontalPageMargin
         text: item.jellyfinId + "\n" + appWindow.playbackManager.streamUrl + "\n"
-              + (manager.playMethod === J.PlaybackManager.DirectPlay ? "Direct Play" : "Transcoding") + "\n"
-              + manager.position + "\n"
-              + manager.mediaStatus + "\n"
+              + "Playback method: " + _playbackMethod + "\n"
+              + "Media status: " + manager.mediaStatus + "\n"
               // + player.bufferProgress + "\n"
               // + player.metaData.videoCodec + "@" + player.metaData.videoFrameRate + "(" + player.metaData.videoBitRate + ")" + "\n"
               // + player.metaData.audioCodec + "(" + player.metaData.audioBitRate + ")" + "\n"
@@ -22,6 +22,7 @@ import Sailfish.Silica 1.0
 import QtMultimedia 5.6
 import nl.netsoj.chris.Jellyfin 1.0
 
+import Nemo.Configuration 1.0
 import Nemo.Notifications 1.0
 import Nemo.KeepAlive 1.2
 
@@ -41,9 +42,10 @@ ApplicationWindow {
     property var itemData: pageStack.currentPage.itemData
 
     // Bad way to implement settings, but it'll do for now.
-    property bool showDebugInfo: true
+    property alias showDebugInfo: config.showDebugInfo
     property bool _hidePlaybackBar: false
 
+
     bottomMargin: playbackBar.visibleSize
     ApiClient {
         id: _apiClient
@@ -132,6 +134,12 @@ ApplicationWindow {
         Component.onCompleted: playbackBar.parent = __silica_applicationwindow_instance._rotatingItem
     }
 
+    ConfigurationGroup {
+        id: config
+        path: "/nl/netsoj/chris/Sailfin"
+        property bool showDebugInfo: false
+    }
+
     //FIXME: proper error handling
     Connections {
         target: apiClient
@@ -117,6 +117,13 @@ Page {
                 text: qsTr("Other")
             }
 
+            IconListItem {
+                //: Settings list item for settings related to streaming
+                text: qsTr("Streaming settings")
+                iconSource: "image://theme/icon-m-cloud-download"
+                onClicked: pageStack.push(Qt.resolvedUrl("settings/StreamingPage.qml"))
+            }
+
             IconListItem {
                 //: Debug information settings menu itemy
                 text: qsTr("Debug information")
@@ -18,6 +18,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
 import QtQuick 2.6
 import Sailfish.Silica 1.0
+import Nemo.Configuration 1.0
 
 import nl.netsoj.chris.Jellyfin 1.0 as J
 
@@ -30,6 +31,12 @@ Page {
     // The effective value will be restricted by ApplicationWindow.allowedOrientations
     allowedOrientations: Orientation.All
 
+    ConfigurationGroup {
+        id: config
+        path: "/nl/netsoj/chris/Sailfin"
+        property bool showDebugInfo: false
+    }
+
     SilicaFlickable {
         anchors.fill: parent
         contentHeight: content.height
@@ -42,8 +49,8 @@ Page {
 
             TextSwitch {
                 text: qsTr("Show debug information")
-                checked: appWindow.showDebugInfo
-                onCheckedChanged: appWindow.showDebugInfo = checked
+                checked: config.showDebugInfo
+                onCheckedChanged: config.showDebugInfo = checked
             }
 
             SectionHeader {
sailfish/qml/pages/settings/StreamingPage.qml (new file, 67 lines)
@@ -0,0 +1,67 @@
+/*
+Sailfin: a Jellyfin client written using Qt
+Copyright (C) 2021 Chris Josten
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+*/
+import QtQuick 2.6
+import Sailfish.Silica 1.0
+
+import nl.netsoj.chris.Jellyfin 1.0 as J
+
+import "../../components"
+import "../.."
+
+Page {
+    id: page
+
+    // The effective value will be restricted by ApplicationWindow.allowedOrientations
+    allowedOrientations: Orientation.All
+
+    SilicaFlickable {
+        anchors.fill: parent
+        contentHeight: content.height
+
+        Column {
+            id: content
+            width: parent.width
+
+            PageHeader {
+                title: qsTr("Streaming settings")
+            }
+
+            TextSwitch {
+                text: qsTr("Allow transcoding")
+                description: qsTr("If enabled, Sailfin may request the Jellyfin server " +
+                                  "to transcode media to a more suitable media format for this device. " +
+                                  "It is recommended to leave this enabled unless your server is weak.")
+                checked: appWindow.apiClient.settings.allowTranscoding
+                onCheckedChanged: appWindow.apiClient.settings.allowTranscoding = checked
+            }
+
+            Slider {
+                minimumValue: 0
+                maximumValue: 64 * 1024 * 1024
+                stepSize: 1024 * 128
+                valueText: qsTr("%1 mbps").arg((value / 1024 / 1024).toPrecision(4))
+                value: appWindow.apiClient.settings.maxStreamingBitRate
+                onDownChanged: if (!down) appWindow.apiClient.settings.maxStreamingBitRate = value
+                label: qsTr("Maximum streaming bitrate")
+                width: parent.width
+            }
+        }
+    }
+
+}