settings.beginGroup(QString::fromLatin1("backend"));
settings.setValue(QString::fromLatin1("video_uri"), video_uri);
+ settings.setValue(QString::fromLatin1("v6FirstWord"), v6FirstWord);
settings.setValue(QString::fromLatin1("segment_buffer_size"), segment_buffer_size);
settings.endGroup();
QString frag_sample;
QString frag_pp;
QString video_uri;
+ QString v6FirstWord;
qreal segment_buffer_size;
QString last_played;
QString adaptation_logic;
settings.beginGroup(QString::fromLatin1("backend"));
setVideoURI(settings.value(QString::fromLatin1("video_uri"), QString::fromLatin1("http://webserver/sintel/mpd")).toString());
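+ // Default IPv6 first word; "b001" is presumably the leading hextet of the prefix used to build ICN content names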
+ setV6FirstWord(settings.value(QString::fromLatin1("v6FirstWord"), QString::fromLatin1("b001")).toString());
setSegmentBufferSize(settings.value(QString::fromLatin1("segment_buffer_size"), 20).toReal());
settings.endGroup();
{
return mpData->video_uri;
}
+QString Config::v6FirstWord() const
+{
+ return mpData->v6FirstWord;
+}
Config& Config::setVideoURI(const QString &text)
{
return *this;
}
+Config& Config::setV6FirstWord(const QString &text)
+{
+ if (mpData->v6FirstWord == text)
+ return *this;
+ mpData->v6FirstWord = text;
+ Q_EMIT v6FirstWordChanged();
+ Q_EMIT changed();
+ return *this;
+}
+
qreal Config::segmentBufferSize() const
{
return mpData->segment_buffer_size;
Q_PROPERTY(QString fragSample READ fragSample WRITE setFragSample NOTIFY fragSampleChanged)
Q_PROPERTY(QString fragPostProcess READ fragPostProcess WRITE setFragPostProcess NOTIFY fragPostProcessChanged)
Q_PROPERTY(QString videoURI READ videoURI WRITE setVideoURI NOTIFY videoURIChanged)
+ Q_PROPERTY(QString v6FirstWord READ v6FirstWord WRITE setV6FirstWord NOTIFY v6FirstWordChanged)
Q_PROPERTY(qreal segmentBufferSize READ segmentBufferSize WRITE setSegmentBufferSize NOTIFY segmentBufferSizeChanged)
Q_PROPERTY(QString adaptationLogic READ adaptationLogic WRITE setAdaptationLogic NOTIFY adaptationLogicChanged)
QString fragPostProcess() const;
Config& setVideoURI(const QString &value);
QString videoURI() const;
+ Config& setV6FirstWord(const QString &value);
+ QString v6FirstWord() const;
Config& setSegmentBufferSize(qreal value);
qreal segmentBufferSize() const;
Config& setAdaptationLogic(const QString &value);
Q_SIGNAL void languageChanged();
Q_SIGNAL void historyChanged();
Q_SIGNAL void videoURIChanged();
+ Q_SIGNAL void v6FirstWordChanged();
Q_SIGNAL void segmentBufferSizeChanged();
Q_SIGNAL void adaptationLogicChanged();
Q_SIGNAL void icnChanged();
using namespace dash::mpd;
using namespace libdash::framework::mpd;
-DASHManager::DASHManager(viper::managers::StreamType type, uint32_t maxCapacity, IDASHManagerObserver* stream, MPDWrapper* mpdWrapper, bool icnEnabled, double icnAlpha, bool nodecoding, float beta, float drop) :
- readSegmentCount (0),
- receiver (NULL),
- multimediaStream (stream),
- isRunning (false),
- icn (icnEnabled),
- icnAlpha (icnAlpha),
- noDecoding (nodecoding),
- beta (beta),
- drop (drop)
+DASHManager::DASHManager(viper::managers::StreamType type, uint32_t maxCapacity, IDASHManagerObserver* stream, MPDWrapper* mpdWrapper, bool icnEnabled, double icnAlpha, bool nodecoding, float beta, float drop, std::string v6FirstWord) :
+ readSegmentCount (0),
+ receiver (NULL),
+ multimediaStream (stream),
+ isRunning (false),
+ icn (icnEnabled),
+ icnAlpha (icnAlpha),
+ noDecoding (nodecoding),
+ beta (beta),
+ drop (drop),
+ v6FirstWord (v6FirstWord)
{
this->buffer = new Buffer<MediaObject>(maxCapacity,libdash::framework::buffer::VIDEO);
this->buffer->attachObserver(this);
- this->receiver = new DASHReceiver(type, mpdWrapper, this, this->buffer, maxCapacity, this->isICN(), this->icnAlpha, this->beta, this->drop);
+ this->receiver = new DASHReceiver(type, mpdWrapper, this, this->buffer, maxCapacity, this->isICN(), this->icnAlpha, this->beta, this->drop, this->v6FirstWord);
}
DASHManager::~DASHManager()
{
class DASHManager : public IDASHReceiverObserver, public IBufferObserver
{
public:
- DASHManager (viper::managers::StreamType type, uint32_t maxCapacity, IDASHManagerObserver *multimediaStream, libdash::framework::mpd::MPDWrapper *mpdWrapper, bool icnEnabled, double icnAlpha, bool nodecoding, float beta, float drop);
+ DASHManager (viper::managers::StreamType type, uint32_t maxCapacity, IDASHManagerObserver *multimediaStream, libdash::framework::mpd::MPDWrapper *mpdWrapper, bool icnEnabled, double icnAlpha, bool nodecoding, float beta, float drop, std::string v6FirstWord);
virtual ~DASHManager ();
bool start();
private:
float beta;
float drop;
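+ // First word of the IPv6 prefix, forwarded to DASHReceiver for the ICN connections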
+ std::string v6FirstWord;
buffer::Buffer<MediaObject> *buffer;
DASHReceiver *receiver;
uint32_t readSegmentCount;
using duration_in_seconds = std::chrono::duration<double, std::ratio<1, 1> >;
-DASHReceiver::DASHReceiver (viper::managers::StreamType type, MPDWrapper *mpdWrapper, IDASHReceiverObserver *obs, Buffer<MediaObject> *buffer, uint32_t bufferSize, bool icnEnabled, double icnAlpha, float beta, float drop) :
+DASHReceiver::DASHReceiver (viper::managers::StreamType type, MPDWrapper *mpdWrapper, IDASHReceiverObserver *obs, Buffer<MediaObject> *buffer, uint32_t bufferSize, bool icnEnabled, double icnAlpha, float beta, float drop, std::string v6FirstWord) :
type (type),
mpdWrapper (mpdWrapper),
adaptationSetStream (NULL),
isLooping (false),
beta (beta),
drop (drop),
+ v6FirstWord (v6FirstWord),
bufferingThread (NULL),
mpdFetcherThread (NULL)
{
if(icn)
{
- this->conn = new ICNConnectionConsumerApi(this->icnAlpha, this->beta, this->drop);
- this->initConn = new ICNConnectionConsumerApi(this->icnAlpha, this->beta, this->drop);
+ this->conn = new ICNConnectionConsumerApi(this->icnAlpha, this->beta, this->drop, this->v6FirstWord);
+ this->initConn = new ICNConnectionConsumerApi(this->icnAlpha, this->beta, this->drop, this->v6FirstWord);
}
InitializeCriticalSection(&this->monitorMutex);
InitializeCriticalSection(&this->monitorPausedMutex);
class DASHReceiver
{
public:
- DASHReceiver(viper::managers::StreamType type, libdash::framework::mpd::MPDWrapper *mpdWrapper, IDASHReceiverObserver *obs, buffer::Buffer<MediaObject> *buffer, uint32_t bufferSize, bool icnEnabled, double icnAlpha, float beta, float drop);
+ DASHReceiver(viper::managers::StreamType type, libdash::framework::mpd::MPDWrapper *mpdWrapper, IDASHReceiverObserver *obs, buffer::Buffer<MediaObject> *buffer, uint32_t bufferSize, bool icnEnabled, double icnAlpha, float beta, float drop, std::string v6FirstWord);
virtual ~DASHReceiver();
bool Start();
private:
float beta;
float drop;
+ std::string v6FirstWord;
bool withFeedBack;
bool isBufferBased;
std::map<std::string, MediaObject*> initSegments;
namespace libdash {
namespace framework {
namespace input {
-ICNConnectionConsumerApi::ICNConnectionConsumerApi(double alpha, float beta, float drop) :
+ICNConnectionConsumerApi::ICNConnectionConsumerApi(double alpha, float beta, float drop, std::string v6FirstWord) :
m_first(1),
m_isFinished(false),
sizeDownloaded (0),
cumulativeBytesReceived(0),
icnAlpha(alpha),
beta(beta),
- drop(drop)
+ drop(drop),
+ v6FirstWord(v6FirstWord)
{
gamma = 1;
this->speed = 0.0;
{"User-Agent", "higet/1.0"},
{"Connection", "Keep-Alive"}};
std::string s(m_name.c_str());
- hTTPClientConnection->get(s, headers);
+ hTTPClientConnection->get(s, headers, {}, nullptr, nullptr, this->v6FirstWord);
response = hTTPClientConnection->response();
this->res = true;
this->dataPos = 0;
public:
- ICNConnectionConsumerApi(double alpha, float beta, float drop);
+ ICNConnectionConsumerApi(double alpha, float beta, float drop, std::string v6FirstWord);
virtual ~ICNConnectionConsumerApi();
virtual void Init(dash::network::IChunk *chunk);
libl4::http::HTTPResponse response;
float beta;
float drop;
+ std::string v6FirstWord;
uint64_t i_chunksize;
int i_lifetime;
int i_missed_co;
{
this->url = url;
EnterCriticalSection(&this->monitorMutex);
- this->icnConn = new libdash::framework::input::ICNConnectionConsumerApi(20.0, this->beta, this->drop);
+ this->icnConn = new libdash::framework::input::ICNConnectionConsumerApi(20.0, this->beta, this->drop, this->v6FirstWord);
icnConn->InitForMPD(url);
int ret = 0;
char * data = (char *)malloc(4096);
void MultimediaManager::initVideoRendering(uint32_t offset)
{
- this->videoStream = new MultimediaStream(viper::managers::VIDEO, this->mpdWrapper, this->segmentBufferSize, this->isICN(), this->icnAlpha, this->noDecoding, this->beta, this->drop);
+ this->videoStream = new MultimediaStream(viper::managers::VIDEO, this->mpdWrapper, this->segmentBufferSize, this->isICN(), this->icnAlpha, this->noDecoding, this->beta, this->drop, this->v6FirstWord);
this->videoStream->attachStreamObserver(this);
this->videoStream->setPosition(offset);
}
void MultimediaManager::initAudioPlayback(uint32_t offset)
{
- this->audioStream = new MultimediaStream(viper::managers::AUDIO, this->mpdWrapper, this->segmentBufferSize, this->isICN(), this->icnAlpha, this->noDecoding, this->beta, this->drop);
+ this->audioStream = new MultimediaStream(viper::managers::AUDIO, this->mpdWrapper, this->segmentBufferSize, this->isICN(), this->icnAlpha, this->noDecoding, this->beta, this->drop, this->v6FirstWord);
this->audioStream->attachStreamObserver(this);
this->audioStream->setPosition(offset);
}
this->drop = drop;
}
+void MultimediaManager::setV6FirstWord(std::string v6FirstWord)
+{
+ this->v6FirstWord = v6FirstWord;
+}
+
void MultimediaManager::fetchMPD()
{
if(this->icn)
void setOffset (int offset);
void setBeta (float beta);
void setDrop (float drop);
+ void setV6FirstWord (std::string v6FirstWord);
bool canPush ();
void fetchMPD ();
private:
float beta;
float drop;
+ std::string v6FirstWord;
std::string downloadPath;
int segmentBufferSize;
ViperGui *viperGui;
using namespace libdash::framework::mpd;
using namespace dash::mpd;
-MultimediaStream::MultimediaStream(StreamType type, MPDWrapper *mpdWrapper, uint32_t bufferSize, bool icnEnabled, double icnAlpha, bool nodecoding, float beta, float drop) :
+MultimediaStream::MultimediaStream(StreamType type, MPDWrapper *mpdWrapper, uint32_t bufferSize, bool icnEnabled, double icnAlpha, bool nodecoding, float beta, float drop, std::string v6FirstWord) :
type (type),
segmentBufferSize (bufferSize),
dashManager (NULL),
icnAlpha (icnAlpha),
noDecoding (nodecoding),
beta (beta),
- drop (drop)
+ drop (drop),
+ v6FirstWord (v6FirstWord)
{
this->init();
}
void MultimediaStream::init()
{
- this->dashManager = new DASHManager(this->type, this->segmentBufferSize, this, this->mpdWrapper, this->isICN(), this->icnAlpha, this->noDecoding, this->beta, this->drop);
+ this->dashManager = new DASHManager(this->type, this->segmentBufferSize, this, this->mpdWrapper, this->isICN(), this->icnAlpha, this->noDecoding, this->beta, this->drop, this->v6FirstWord);
}
bool MultimediaStream::start()
class MultimediaStream : public libdash::framework::input::IDASHManagerObserver, public libdash::framework::buffer::IBufferObserver
{
public:
- MultimediaStream(StreamType type, libdash::framework::mpd::MPDWrapper *mpdWrapper, uint32_t segmentBufferSize, bool icnEnabled, double icnAlpha, bool nodecoding, float beta, float drop);
+ MultimediaStream(StreamType type, libdash::framework::mpd::MPDWrapper *mpdWrapper, uint32_t segmentBufferSize, bool icnEnabled, double icnAlpha, bool nodecoding, float beta, float drop, std::string v6FirstWord);
virtual ~MultimediaStream();
bool start();
private:
float beta;
float drop;
+ std::string v6FirstWord;
std::vector<IStreamObserver *> observers;
libdash::framework::mpd::MPDWrapper *mpdWrapper;
libdash::framework::adaptation::IAdaptationLogic *logic;
using namespace dash::mpd;
using namespace std;
-DASHPlayer::DASHPlayer(ViperGui &gui, Config *config) :
+DASHPlayer::DASHPlayer(int argc, char* argv[], ViperGui &gui, Config *config) :
gui (&gui),
config (config)
{
this->multimediaManager = new MultimediaManager(this->gui, this->parametersAdaptation->segmentBufferSize, config->getConfigPath().toStdString() + QString::fromLatin1("/").toStdString());
this->multimediaManager->setBeta(config->beta());
this->multimediaManager->setDrop(config->drop());
+ this->multimediaManager->setV6FirstWord(config->v6FirstWord().toStdString());
connect(this->gui->getVideoPlayer(), SIGNAL(positionChanged(qint64)), SLOT(updateSlider(qint64)));
connect(this->gui->getVideoPlayer(), SIGNAL(stateChanged(QtAV::AVPlayer::State)), SLOT(manageGraph(QtAV::AVPlayer::State)));
connect(this->gui->getVideoPlayer(), SIGNAL(error(QtAV::AVError)), this, SLOT(error(QtAV::AVError)));
this->multimediaManager->attachManagerObserver(this);
this->mpdWrapper = new MPDWrapper(NULL);
this->multimediaManager->setMPDWrapper(this->mpdWrapper);
+
+ // TODO: add a proper parsing function to handle command-line arguments
+
+ int index = 1;
+
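+ // "-P <word>" overrides the configured IPv6 first word from the command line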
+ while(index < argc)
+ {
+ if(!strcmp(argv[index], "-P") && index+1 < argc)
+ {
+ this->setV6FirstWord(argv[index+1]);
+ index += 2;
+ }
+ else
+ {
+ index++; // skip unrecognized arguments so the loop always terminates
+ }
+ }
+
}
DASHPlayer::~DASHPlayer()
this->beta = config->beta();
this->drop = config->drop();
this->videoURI = config->videoURI().toStdString();
+ this->v6FirstWord = config->v6FirstWord().toStdString();
this->alpha = config->alpha();
this->repeat = config->repeat();
this->parametersAdaptation = (struct AdaptationParameters *)malloc(sizeof(struct AdaptationParameters));
return config->videoURI();
}
+QString DASHPlayer::getV6FirstWord()
+{
+ return config->v6FirstWord();
+}
+
void DASHPlayer::setVideoURI(QString videoURI)
{
config->setVideoURI(videoURI);
}
+void DASHPlayer::setV6FirstWord(QString v6FirstWord)
+{
+ config->setV6FirstWord(v6FirstWord);
+ this->multimediaManager->setV6FirstWord(v6FirstWord.toStdString());
+}
+
qreal DASHPlayer::getAlpha()
{
return config->alpha();
Q_OBJECT
public:
- DASHPlayer(ViperGui& gui, Config *config);
+ DASHPlayer(int argc, char* argv[], ViperGui& gui, Config *config);
virtual ~DASHPlayer();
virtual void onSettingsChanged(int period, int videoAdaptationSet, int videoRepresentation, int audioAdaptationSet, int audioRepresentation);
Q_INVOKABLE bool getIcn();
Q_INVOKABLE void setIcn(bool icn);
Q_INVOKABLE QString getVideoURI();
+ Q_INVOKABLE QString getV6FirstWord();
Q_INVOKABLE void setVideoURI(QString videoURI);
+ Q_INVOKABLE void setV6FirstWord(QString v6FirstWord);
Q_INVOKABLE qreal getAlpha();
Q_INVOKABLE void setAlpha(qreal alpha);
Q_INVOKABLE qreal getSegmentBufferSize();
const char *url;
bool icn;
std::string videoURI;
+ std::string v6FirstWord;
double alpha;
struct libdash::framework::adaptation::AdaptationParameters *parametersAdaptation;
libdash::framework::adaptation::LogicType adaptLogic;
gui.setPlayButton(viewer.rootObject()->findChild<QObject*>(QStringLiteral("playBtn")));
gui.setGraphDataSource(&graphDataSource);
gui.setRootObject(viewer.rootObject());
- DASHPlayer dashPlayer(gui, &Config::instance());
+ DASHPlayer dashPlayer(argc, argv, gui, &Config::instance());
engine->rootContext()->setContextProperty("dashPlayer",&dashPlayer);
QMetaObject::invokeMethod(viewer.rootObject(), "initGraph", Q_ARG(QVariant, (&Config::instance())->graph()));
QMetaObject::invokeMethod(viewer.rootObject(), "initRepeat", Q_ARG(QVariant, (&Config::instance())->repeat()));
signal saveAdaptationLogic(string selectedAdaptationLogic, int adaptationLogicKey)
signal saveIcn(bool selectedIcn)
signal saveVideoURI(string selectedVideoURI)
+ signal saveV6FirstWord(string selectedV6FirstWord)
signal saveSegmentBufferSize(real selectedSegmentBufferSize)
signal saveRateAlpha(real selectedRateAlpha)
signal saveBufferReservoirThreshold(real selectedBufferReservoirThreshold)
text: videoURI
}
}
+ Item {
+ id: itemv6Prefix
+ anchors.top: parent.top
+ anchors.left: parent.left
+ anchors.right: parent.right
+ anchors.rightMargin: parent.width/2
+ anchors.topMargin: Utils.scaled(18) + 2*heightRow
+ Label {
+ text: "IPv6 First Word:"
+ id: labelv6Prefix
+ color: "white"
+ anchors.top: parent.top
+ anchors.right: textInputv6FirstWord.left
+ anchors.rightMargin: Utils.scaled(5)
+ anchors.topMargin: (textInputv6FirstWord.height - height)/2
+ font.bold: true
+ font.pixelSize: Utils.scaled(10);
+ }
+
+ TextInput {
+ id: textInputv6FirstWord
+ z: parent.z + 1
+ width: parent.width/4*3
+ anchors.top: parent.bottom
+ anchors.left: parent.left
+ anchors.leftMargin: Utils.scaled(200)
+ font.pixelSize: Utils.scaled(20)
+ color: "white"
+ text: v6FirstWord
+ }
+ }
Item {
id: itemSegmentBufferSize
saveAdaptationLogic(adaptationLogicModel.get(comboAdaptationSetList.currentIndex).text, comboAdaptationSetList.currentIndex);
saveIcn(switchIcn.checked)
saveVideoURI(textInputVideoURI.text)
+ saveV6FirstWord(textInputv6FirstWord.text)
saveSegmentBufferSize(spinboxSegmentBufferSize.value/100)
saveRateAlpha(spinboxRateAlpha.value/100)
saveBufferReservoirThreshold(spinboxBufferReservoirThreshold.value/100)
property bool buffering: false
property string adaptationLogic: ""
property string videoURI: ""
+ property string v6FirstWord: ""
property real alpha: 0
property real segmentBufferSize: 0
property bool icn: false
icn = dashPlayer.getIcn()
adaptationLogic = dashPlayer.getAdaptationLogic()
videoURI = dashPlayer.getVideoURI()
+ v6FirstWord = dashPlayer.getV6FirstWord()
segmentBufferSize = dashPlayer.getSegmentBufferSize()
rateAlpha = dashPlayer.getRateAlpha()
bufferReservoirThreshold = dashPlayer.getBufferReservoirThreshold()
videoURI = selectedVideoURI
}
+ onSaveV6FirstWord: {
+ dashPlayer.setV6FirstWord(selectedV6FirstWord)
+ v6FirstWord = selectedV6FirstWord
+ }
+
onSaveSegmentBufferSize: {
dashPlayer.setSegmentBufferSize(selectedSegmentBufferSize)
segmentBufferSize = selectedSegmentBufferSize