Publications

 

Book

Musikk og bevegelse
My book “Musikk og bevegelse” (Music and Movement) is a textbook in Norwegian that gives an overview of theory and methods used in the study of music-related movements:

Denne boken gir en helhetlig presentasjon av hvordan vi kan forstå musikkrelaterte bevegelser. Forfatteren viser hvordan man kan studere slike bevegelser med utgangspunkt i et tverrfaglig teorigrunnlag. Det gis en innføring både i kvalitative og kvantitative metoder som brukes i forskningen innenfor feltet. Eksempler er tatt fra studier av musikere, dansere og folk som beveger seg til musikk. Boken viser hvordan kropp og bevegelse er en naturlig del av musikkopplevelsen, og det argumenteres for et større fokus på musikkrelaterte bevegelser i forskning og undervisning.

 

The following publication list is generated automagically using the BibTeX2HTML WordPress plugin.

[bibtex grouptype sort=year,author highlight=Jensenius]
@article{Godoy:2008c,
Abstract = {From our studies of sound-related movement (http://musicalgestures.uio.no), we have reason to believe that both sound-producing and sound-accompanying movements are centered around what we call goal-points, meaning certain salient events in the music such as downbeats, or various accent types, or melodic peaks. In music performance, these goal-points are reflected in the positions and shapes of the performers’ effectors (fingers, hands, arms, torso, etc.) at certain moments in time, similar to what is known as keyframes in animation. The movement trajectories between these goal-points, similar to what is known as inter-frames in animation, may often demonstrate the phenomenon of coarticulation, i.e. that the various smaller movement are subsumed under more superordinate and goal-directed movement trajectories. In this paper, we shall present a summary of recent neurocognitive research in support of this scheme of goal-points and coarticulations, as well as demonstrate this with data from our ongoing motion capture studies of pianists’ performance and other researchers’ motion capture data.},
Author = {God{\o}y, Rolf Inge and Jensenius, Alexander Refsum and Nymoen, Kristian},
Date-Added = {2008-01-10 13:54:19 +0100},
Date-Modified = {2012-08-10 16:03:33 +0000},
Journal = {The Journal of the Acoustical Society of America},
Pages = {3657--3657},
Title = {Production and perception of goal-points and coarticulations in music},
Volume = {123},
Year = {2008},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YW8QHQAuAC4ALwBQAEQARgBzAC8ARwBvAGQA+AB5AC8ARwBvAGQA+AB5AF8AMgAwADAAOABhAC4AcABkAGbSFwsYGVdOUy5kYXRhTxEBpgAAAAABpgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAACMqDD0dvZL95XzIwMDhhLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKkrTEkVeWAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAVHb2S/eQAAEAAIAADKPHEDAAAAEQAIAADEkTt2AAAAAQAUAAjKgwAG0+oABW3NAAUAyAAAvzEAAgBFTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBHb2S/eToAR29kv3lfMjAwOGEucGRmAAAOACAADwBHAG8AZAD4AHkAXwAyADAAMAA4AGEALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADVVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9Hb2TDuHkvR29kw7h5XzIwMDhhLnBkZgAAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgDLANAA2AKCAoQCiQKUAp0CqwKvArYCvwLEAtEC1ALmAukC7gAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALw}}

@article{Adde:2009,
Abstract = {OBJECTIVE: Absence of fidgety movements (FM) in high-risk infants is a strong marker for later cerebral palsy (CP). FMs can be classified by the General Movement Assessment (GMA), based on Gestalt perception of the infant’s movement pattern. More objective movement analysis may be provided by computer-based technology. The aim of this study was to explore the feasibility of a computer-based video analysis of infants’ spontaneous movements in classifying non-fidgety versus fidgety movements. METHOD: GMA was performed from video material of the fidgety period in 82 term and preterm infants at low and high risks of developing CP. The same videos were analysed using the developed software called General Movement Toolbox (GMT) with visualisation of the infant’s movements for qualitative analyses. Variables derived from the calculation of displacement of pixels from one video frame to the next were used for quantitative analyses. RESULTS: Visual representations from GMT showed easily recognisable patterns of FMs. Of the eight quantitative variables derived, the variability in displacement of a spatial centre of active pixels in the image had the highest sensitivity (81.5) and specificity (70.0) in classifying FMs. By setting triage thresholds at 90% sensitivity and specificity for FM, the need for further referral was reduced by 70%. CONCLUSION: Video recordings can be used for qualitative and quantitative analyses of FMs provided by GMT. GMT is easy to implement in clinical practice, and may provide assistance in detecting infants without FMs.},
Author = {Adde, Lars and Helbostad, Jorunn L. and Jensenius, Alexander Refsum and Taraldsen, Gunnar and St{\o}en, Ragnhild},
Date-Added = {2009-05-27 08:41:58 +0200},
Date-Modified = {2011-12-09 08:26:39 +0000},
Doi = {10.1016/j.earlhumdev.2009.05.003},
Journal = {Early Human Development},
Number = {9},
Pages = {541--547},
Publisher = {Elsevier},
Title = {Using computer-based video analysis in the study of fidgety movements},
Url = {http://dx.doi.org/10.1016/j.earlhumdev.2009.05.003},
Volume = {85},
Year = {2009},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QGi4uL1BERnMvQWRkZS9BZGRlXzIwMDkucGRm0hcLGBlXTlMuZGF0YU8RAZYAAAAAAZYAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMo8jSNIKwAAAAbUeg1BZGRlXzIwMDkucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABtR9xkKupwAAAAAAAAAAAAEAAwAACSAAAAAAAAAAAAAAAAAAAAAEQWRkZQAQAAgAAMo8cQMAAAARAAgAAMZCkocAAAABABQABtR6AAbT6gAFbc0ABQDIAAC/MQACAEJNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AEFkZGU6AEFkZGVfMjAwOS5wZGYADgAcAA0AQQBkAGQAZQBfADIAMAAwADkALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADBVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9BZGRlL0FkZGVfMjAwOS5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCrALAAuAJSAlQCWQJkAm0CewJ/AoYCjwKUAqECpAK2ArkCvgAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALA},
Bdsk-Url-1 = {http://www.sciencedirect.com/science?_ob=ArticleURL&_udi=B6T65-4WBR6GJ-1&_user=674998&_rdoc=1&_fmt=&_orig=search&_sort=d&view=c&_acct=C000036598&_version=1&_urlVersion=0&_userid=674998&md5=fe057896e9ae4af0b03a0b199ab83cd0},
Bdsk-Url-2 = {http://dx.doi.org/doi:10.1016/j.earlhumdev.2009.05.003},
Bdsk-Url-3 = {http://dx.doi.org/10.1016/j.earlhumdev.2009.05.003}}

@article{Godoy:2010b,
Abstract = {In our own and other research on music-related actions, findings suggest that perceived action and sound are broken down into a series of chunks in people’s minds when they perceive or imagine music. Chunks are here understood as holistically conceived and perceived fragments of action and sound, typically with durations in the 0.5 to 5 seconds range. There is also evidence suggesting the occurrence of coarticulation within these chunks, meaning the fusion of small-scale actions and sounds into more superordinate actions and sounds. Various aspects of chunking and coarticulation are discussed in view of their role in the production and perception of music, and it is suggested that coarticulation is an integral element of music and should be more extensively explored in the future.},
Annote = {Month: July/August },
Author = {God{\o}y, Rolf Inge and Jensenius, Alexander Refsum and Nymoen, Kristian},
Date-Added = {2010-07-06 18:30:50 +0200},
Date-Modified = {2011-12-09 08:26:56 +0000},
Doi = {10.3813/AAA.918323},
Journal = {Acta Acustica united with Acustica},
Number = {4},
Pages = {690--700},
Publisher = {S. Hirzel Verlag},
Title = {Chunking in Music by Coarticulation},
Url = {http://dx.doi.org/10.3813/AAA.918323},
Volume = {96},
Year = {2010},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YW8QHAAuAC4ALwBQAEQARgBzAC8ARwBvAGQA+AB5AC8ARwBvAGQA+AB5AF8AMgAwADEAMAAuAHAAZABm0hcLGBlXTlMuZGF0YU8RAaAAAAAAAaAAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMo8jSNIKwAAAAjKgw5Hb2S/eV8yMDEwLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADIgJyZJRhwAAAAAAAAAAAAEAAwAACSAAAAAAAAAAAAAAAAAAAAAFR29kv3kAABAACAAAyjxxAwAAABEACAAAyZJDdwAAAAEAFAAIyoMABtPqAAVtzQAFAMgAAL8xAAIARE1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoAR29kv3k6AEdvZL95XzIwMTAucGRmAA4AHgAOAEcAbwBkAPgAeQBfADIAMAAxADAALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADRVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9Hb2TDuHkvR29kw7h5XzIwMTAucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AyQDOANYCegJ8AoECjAKVAqMCpwKuArcCvALJAswC3gLhAuYAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC6A==},
Bdsk-Url-1 = {http://dx.doi.org/10.3813/AAA.918323}}

@article{Adde:2010,
Abstract = {IM The aim of this study was to investigate the predictive value of a computer-based video analysis of the development of cerebral palsy (CP) in young infants.
METHOD A prospective study of general movements used recordings from 30 high-risk infants (13 males, 17 females; mean gestational age 31wks, SD 6wks; range 23–42wks) between 10 and 15 weeks post term when fidgety movements should be present. Recordings were analysed using computer vision software. Movement variables, derived from differences between subsequent video frames, were used for quantitative analyses. CP status was reported at 5 years.
RESULTS Thirteen infants developed CP (eight hemiparetic, four quadriparetic, one dyskinetic; seven ambulatory, three non-ambulatory, and three unknown function), of whom one had fidgety movements. Variability of the centroid of motion had a sensitivity of 85% and a specificity of 71% in identifying CP. By combining this with variables reflecting the amount of motion, specificity increased to 88%. Nine out of 10 children with CP, and for whom information about functional level was available, were correctly predicted with regard to ambulatory and non-ambulatory
function.
INTERPRETATION Prediction of CP can be provided by computer-based video analysis in young infants. The method may serve as an objective and feasible tool for early prediction of CP in high-risk infants.
},
Author = {Adde, Lars and Helbostad, Jorunn L. and Jensenius, Alexander Refsum and Taraldsen, Gunnar and Grunewaldt, Kristine and St{\o}en, Ragnhild},
Date-Added = {2010-03-10 10:51:46 +0100},
Date-Modified = {2011-12-09 08:26:49 +0000},
Doi = {10.1111/j.1469-8749.2010.03629.x},
Journal = {Developmental Medicine \& Child Neurology},
Number = {8},
Pages = {773--778},
Publisher = {Wiley Online Library},
Title = {Early prediction of cerebral palsy by computer-based video analysis of general movements: a feasibility study},
Url = {http://dx.doi.org/10.1111/j.1469-8749.2010.03629.x},
Volume = {52},
Year = {2010},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QGi4uL1BERnMvQWRkZS9BZGRlXzIwMTAucGRm0hcLGBlXTlMuZGF0YU8RAZYAAAAAAZYAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMo8jSNIKwAAAAbUeg1BZGRlXzIwMTAucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABtSDx7J/FAAAAAAAAAAAAAEAAwAACSAAAAAAAAAAAAAAAAAAAAAEQWRkZQAQAAgAAMo8cQMAAAARAAgAAMeycQQAAAABABQABtR6AAbT6gAFbc0ABQDIAAC/MQACAEJNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AEFkZGU6AEFkZGVfMjAxMC5wZGYADgAcAA0AQQBkAGQAZQBfADIAMAAxADAALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADBVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9BZGRlL0FkZGVfMjAxMC5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCrALAAuAJSAlQCWQJkAm0CewJ/AoYCjwKUAqECpAK2ArkCvgAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALA},
Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1469-8749.2010.03629.x}}

@article{Adde:2011,
Abstract = {Background and aims: Absent fidgety movements (FMs) in infants is a strong marker for cerebral palsy. A computer based video analysis tool (General Movement Toolbox, GMT) has been developed to identify infants with absent FMs. Our aim was to evaluate whether a mean of two recordings was superior to one recording in computer based FM classification.

Methods: Fifty-two term and preterm infants (24 boys, 28 girls, 26 preterm; median preterm gestational age 30 wks, range 23-36 wks) were recruited. All infants had two video recordings between 9 and 17 weeks post term age, and FMs were classified based on the Prechtl approach of general movement assessment (GMA). The GMT variable reflecting variability of the centre of the movement (CSD) was used for quantitative FM classification. Areas under receiver operating characteristic curves were used as a measure of strength of the classification model. Logistic regression with leave-one-out cross validation was used.

Results: Median ages at first and second recordings were 11 and 15 weeks post term, respectively. Eighteen of 104 recordings were classified with absent FMs by GMA. When using the computer generated variable CSD, area under curve was 0.82 using the first and 0.81 using the second recording. When using the mean value of CSD from both recordings, area under curve increased to 0.88.

Conclusions: Computer based video analysis estimating the variability of the centre of movement (CSD) can be used to classify FMs. A more accurate model is provided by averaging values from two recordings compared to a single recording.},
Author = {Adde, Lars and Langaas, Mette and Jensenius, Alexander Refsum and Helbostad, Jorunn L. and St{\o}en, Ragnhild},
Date-Added = {2012-07-05 07:09:56 +0000},
Date-Modified = {2012-08-21 11:32:29 +0000},
Doi = {10.1038/pr.2011.520},
Journal = {Pediatric Research},
Pages = {295--295},
Title = {Computer Based Assessment of General Movements in Young Infants using One or Two Video Recordings},
Url = {http://dx.doi.org/10.1038/pr.2011.520},
Volume = {70},
Year = {2011},
Bdsk-Url-1 = {http://dx.doi.org/10.1038/pr.2011.520}}

@article{Quay:2011,
Abstract = {The authors present an experimental musical performance called Dance Jockey, wherein sounds are controlled by sensors on the dancer’s body. These sensors manipulate music in real time by acquiring data about body actions and transmitting the information to a control unit that makes decisions and gives instructions to audio software. The system triggers a broad range of music events and maps them to sound effects and musical parameters such as pitch, loudness and rhythm.},
Annote = {http://www.duo.uio.no/sok/work.html?WORKID=148134&fid=89171

http://www.mitpressjournals.org/doi/abs/10.1162/LMJ_a_00052},

Author = {de Quay, Yago and Skogstad, St{\aa}le Andreas van Dorp and Jensenius, Alexander Refsum},
Date-Added = {2011-04-26 08:04:11 +0200},
Date-Modified = {2012-07-03 06:02:01 +0000},
Doi = {10.1162/LMJ_a_00052},
Journal = {Leonardo Music Journal},
Pages = {11--12},
Title = {Dance Jockey: Performing Electronic Music by Dancing},
Url = {http://www.duo.uio.no/sok/work.html?WORKID=148134&fid=89171},
Volume = {21},
Year = {2011},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QMC4uLy4uL0Rlc2t0b3AvbG1qLjIwMTEuLS5pc3N1ZS0yMS5sYXJnZWNvdmVyLmpwZ9IXCxgZV05TLmRhdGFPEQHKAAAAAAHKAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAFAWYfbG1qLjIwMTEuLS5pc3N1ZS0yMS4jMTYwMjlFLmpwZwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABYCnssJciEAAAAAAAAAAAACAAIAAAkgAAAAAAAAAAAAAAAAAAAAB0Rlc2t0b3AAABAACAAAyjxxAwAAABEACAAAywlkEQAAAAEADAAFAWYABQDIAAC/MQACAEZNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBEZXNrdG9wOgBsbWouMjAxMS4tLmlzc3VlLTIxLiMxNjAyOUUuanBnAA4ARgAiAGwAbQBqAC4AMgAwADEAMQAuAC0ALgBpAHMAcwB1AGUALQAyADEALgBsAGEAcgBnAGUAYwBvAHYAZQByAC4AagBwAGcADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgA5VXNlcnMvYWxleGFuamUvRGVza3RvcC9sbWouMjAxMS4tLmlzc3VlLTIxLmxhcmdlY292ZXIuanBnAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOAMEAxgDOApwCngKjAq4CtwLFAskC0ALZAt4C6wLuAwADAwMIAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAwo=},
Bdsk-File-2 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QGi4uL1BERnMvUXVheS9RdWF5XzIwMTEucGRm0hcLGBlXTlMuZGF0YU8RAZYAAAAAAZYAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMo8jSNIKwAAABgN1A1RdWF5XzIwMTEucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGA2byw0S6QAAAAAAAAAAAAEAAwAACSAAAAAAAAAAAAAAAAAAAAAEUXVheQAQAAgAAMo8cQMAAAARAAgAAMsNBNkAAAABABQAGA3UAAbT6gAFbc0ABQDIAAC/MQACAEJNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AFF1YXk6AFF1YXlfMjAxMS5wZGYADgAcAA0AUQB1AGEAeQBfADIAMAAxADEALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADBVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9RdWF5L1F1YXlfMjAxMS5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCrALAAuAJSAlQCWQJkAm0CewJ/AoYCjwKUAqECpAK2ArkCvgAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALA},
Bdsk-Url-1 = {http://www.mitpressjournals.org/doi/abs/10.1162/LMJ_a_00052},
Bdsk-Url-2 = {http://dx.doi.org/10.1162/LMJ_a_00052},
Bdsk-Url-3 = {http://www.duo.uio.no/sok/work.html?WORKID=148134&fid=89171}}

@article{Jensenius:2012,
Abstract = {This article presents the development of the improvisation piece Transformation for electric violin and live electronics. The aim of the project was to develop an “invisible” technological setup that would allow the performer to move freely on stage while still being in full control of the electronics. The developed system consists of a video-based motion-tracking system, with a camera hanging in the ceiling above the stage. The performer’s motion and position on stage is used to control the playback of sonic fragments from a database of violin sounds, using concatenative synthesis as the sound engine. The setup allows the performer to improvise freely together with the electronic sounds being played back as she moves around the “sonic space.” The system has been stable in rehearsal and performance, and the simplicity of the approach has been inspiring to both the performer and the audience.},
Author = {Jensenius, Alexander Refsum and Johnson, Victoria},
Date-Added = {2010-12-17 07:53:56 +0100},
Date-Modified = {2013-01-08 14:51:50 +0000},
Journal = {Computer Music Journal},
Number = {4},
Pages = {28--39},
Title = {Performing the Electric Violin in a Sonic Space},
Volume = {36},
Year = {2012},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEyaC5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDEyaC5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAF0cKjM2BLhAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAzNgE0QAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMmgucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADIAaAAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMmgucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=}}

@article{Adde:2013,
Abstract = {This study evaluates the role of postterm age at assessment and the use of one or two video recordings for the detection of fidgety movements (FMs) and prediction of cerebral palsy (CP) using computer vision software. Recordings between 9 and 17 weeks postterm age from 52 preterm and term infants (24 boys, 28 girls; 26 born preterm) were used. Recordings were analyzed using computer vision software. Movement variables, derived from differences between subsequent video frames, were used for quantitative analysis. Sensitivities, specificities, and area under curve were estimated for the first and second recording, or a mean of both. FMs were classified based on the Prechtl approach of general movement assessment. CP status was reported at 2 years. Nine children developed CP of whom all recordings had absent FMs. The mean variability of the centroid of motion (C(SD)) from two recordings was more accurate than using only one recording, and identified all children who were diagnosed with CP at 2 years. Age at assessment did not influence the detection of FMs or prediction of CP. The accuracy of computer vision techniques in identifying FMs and predicting CP based on two recordings should be confirmed in future studies.},
Author = {Adde, Lars and Helbostad, Jorunn and Jensenius, Alexander R and Langaas, Mette and St{\o}en, Ragnhild},
Date-Added = {2013-03-11 20:10:56 +0000},
Date-Modified = {2013-03-13 21:00:59 +0000},
Doi = {10.3109/09593985.2012.757404},
Journal = {Physiotherapy theory and practice},
Pages = {1--7},
Publisher = {Informa Healthcare New York, USA},
Title = {Identification of fidgety movements and prediction of {CP} by the use of computer-based video analysis is more accurate when based on two video recordings},
Year = {2013},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QGi4uL1BERnMvQWRkZS9BZGRlXzIwMTMucGRm0hcLGBlXTlMuZGF0YU8RAZYAAAAAAZYAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMo8jSNIKwAAAAbUeg1BZGRlXzIwMTMucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB1Ao4zWUHXAAAAAAAAAAAAAEAAwAACSAAAAAAAAAAAAAAAAAAAAAEQWRkZQAQAAgAAMo8cQMAAAARAAgAAM1k+UwAAAABABQABtR6AAbT6gAFbc0ABQDIAAC/MQACAEJNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AEFkZGU6AEFkZGVfMjAxMy5wZGYADgAcAA0AQQBkAGQAZQBfADIAMAAxADMALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADBVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9BZGRlL0FkZGVfMjAxMy5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCrALAAuAJSAlQCWQJkAm0CewJ/AoYCjwKUAqECpAK2ArkCvgAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALA}}

@article{Jensenius:2013b,
Abstract = {The conceptual starting point for an `action--sound approach' to teaching music technology is the acknowledgment of the couplings that exist in acoustic instruments between sounding objects, sound-producing actions and the resultant sounds themselves. Digital music technologies, on the other hand, are not limited to such natural couplings, but allow for arbitrary new relationships to be created between objects, actions and sounds. The endless possibilities of such virtual action--sound relationships can be exciting and creatively inspiring, but they can also lead to frustration among performers and confusion for audiences. This paper presents the theoretical foundations for an action--sound approach to electronic instrument design and discusses the ways in which this approach has shaped the undergraduate course titled `Interactive Music' at the University of Oslo. In this course, students start out by exploring various types of acoustic action--sound couplings before moving on to designing, building, performing and evaluating both analogue and digital electronic instruments from an action--sound perspective.},
Author = {Jensenius, Alexander Refsum},
Date-Added = {2012-12-12 19:13:11 +0000},
Date-Modified = {2013-05-13 18:18:35 +0000},
Doi = {10.1017/S1355771813000095},
Journal = {Organised Sound},
Number = {2},
Pages = {178--189},
Title = {An Action--Sound Approach to Teaching Interactive Music},
Volume = {18},
Year = {2013},
Bdsk-Url-1 = {http://dx.doi.org/10.1017/S1355771813000095}}

@article{Nymoen:2013,
Annote = {
K. Nymoen, R.I. God{\o}y, A.R. Jensenius, and J. Torresen. Submitted for (in review)},
Author = {Nymoen, Kristian and God{\o}y, Rolf Inge and Jensenius, Alexander Refsum and T{\o}rresen, Jim},
Date-Added = {2012-10-15 10:58:59 +0000},
Date-Modified = {2012-12-31 10:15:07 +0000},
Journal = {ACM Transactions on Applied Perception},
Note = {Forthcoming},
Title = {Analysing Correspondences of Sound Objects and Body Motion},
Year = {2013}}

@article{Jensenius:2012f,
  abstract      = {We present the results of a series of observation studies of ourselves standing still on the floor for 10 minutes at a time. The aim has been to understand more about our own standstill, and to develop a heightened sensitivity for micromovements and how they can be used in music and dance performance. The quantity of motion, calculated from motion capture data of a head marker, reveals remarkably similar results for each person, and also between persons. The best results were obtained with the feet at the width of the shoulders, locked knees, and eyes open. No correlation was found between different types of mental strategies employed and the quantity of motion of the head marker, but we still believe that different mental strategies have an important subjective and communicative impact. The findings will be used in the development of a stage performance focused on micromovements.},
  author        = {Jensenius, Alexander Refsum and Bjerkestrand, Kari Anne Vadstensvik and Johnson, Victoria},
  date-added    = {2012-02-21 06:38:57 +0000},
  date-modified = {2013-01-10 16:04:15 +0000},
  journal       = {International Journal of Arts and Technology},
  number        = {2},
  title         = {How Still is still? Exploring Human Standstill for Artistic Applications},
  volume        = {6},
  year          = {2013}}

@article{Jensenius:2013,
Abstract = {This paper presents an overview of techniques for creating visual displays of human body movement based on video recordings. First a review of early movement and video visualization techniques is given. Then follows an overview of techniques that the author has developed and used in the study of music-related body movements: motion history images, motion average images, motion history keyframe images and motiongrams. Finally, examples are given of how such visualization techniques have been used in empirical music research, in medical research and for creative applications.},
Author = {Jensenius, Alexander Refsum},
Date-Added = {2010-10-04 13:48:30 +0200},
Date-Modified = {2013-01-09 18:39:51 +0000},
Journal = {Leonardo},
Number = {1},
Pages = {53--60},
Title = {Some video abstraction techniques for displaying body movement in analysis and performance},
Volume = {46},
Year = {2013},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJC4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEzLnBkZtIXCxgZV05TLmRhdGFPEQG6AAAAAAG6AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAMwroSSmVuc2VuaXVzXzIwMTMucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAZm2oc0TdOMAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAACUplbnNlbml1cwAAEAAIAADKPHEDAAAAEQAIAADNE2bTAAAAAQAUAAzCugAG0+oABW3NAAUAyAAAvzEAAgBMTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBKZW5zZW5pdXM6AEplbnNlbml1c18yMDEzLnBkZgAOACYAEgBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADMALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADpVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9KZW5zZW5pdXMvSmVuc2VuaXVzXzIwMTMucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtQC6AMICgAKCAocCkgKbAqkCrQK0Ar0CwgLPAtIC5ALnAuwAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC7g==},
Bdsk-File-2 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEzYS5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDEzYS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGZzJ7NE3x7AAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAzRNuawAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxM2EucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADMAYQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxM2EucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=}}

@article{Jensenius:2013a,
Author = {Jensenius, Alexander Refsum and God{\o}y, Rolf Inge},
Date-Added = {2012-10-03 08:23:01 +0000},
Date-Modified = {2013-04-07 17:30:42 +0000},
Journal = {Empirical Musicology Review},
Note = {Under review},
Title = {Sonifying the shape of human body motion using motiongrams},
Year = {2013}}

@book{Jensenius:2009,
Abstract = {Denne boken gir en helhetlig presentasjon av hvordan vi kan forst{\aa} musikkrelaterte bevegelser. Forfatteren viser hvordan man kan studere slike bevegelser med utgangspunkt i et tverrfaglig teorigrunnlag. Det gis en innf{\o}ring b{\aa}de i kvalitative og kvantitative metoder som brukes i forskningen innenfor feltet. Eksempler er tatt fra studier av musikere, dansere og folk som beveger seg til musikk. Boken viser hvordan kropp og bevegelse er en naturlig del av musikkopplevelsen, og det argumenteres for et st{\o}rre fokus p{\aa} musikkrelaterte bevegelser i forskning og undervisning.},
Address = {Oslo},
Author = {Jensenius, Alexander Refsum},
Date-Added = {2009-01-15 23:09:23 +0100},
Date-Modified = {2011-11-07 19:37:00 +0000},
Publisher = {Unipub},
Title = {Musikk og bevegelse},
Url = {http://musikkogbevegelse.no/},
Year = {2009},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJC4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA5LnBkZtIXCxgZV05TLmRhdGFPEQG6AAAAAAG6AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAMwroSSmVuc2VuaXVzXzIwMDkucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzEmcbVe7gAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAACUplbnNlbml1cwAAEAAIAADKPHEDAAAAEQAIAADG1V+YAAAAAQAUAAzCugAG0+oABW3NAAUAyAAAvzEAAgBMTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBKZW5zZW5pdXM6AEplbnNlbml1c18yMDA5LnBkZgAOACYAEgBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADkALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADpVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9KZW5zZW5pdXMvSmVuc2VuaXVzXzIwMDkucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtQC6AMICgAKCAocCkgKbAqkCrQK0Ar0CwgLPAtIC5ALnAuwAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC7g==},
Bdsk-File-2 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA5YS5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDA5YS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxJ/G1tU0AAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAxta5FAAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAwOWEucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADkAYQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAwOWEucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://musikkogbevegelse.no/}}

@book{Jensenius:2011b,
Address = {Oslo},
Author = {Jensenius, Alexander Refsum and Halmrast, Tor and God{\o}y, Rolf Inge},
Date-Added = {2010-12-17 07:56:08 +0100},
Date-Modified = {2012-12-31 10:16:21 +0000},
Publisher = {Unipub},
Title = {Musikk og lyd},
Year = {In preparation}}

@incollection{Jensenius:2006a,
Abstract = {This paper presents our work on building low-cost music controllers intended for educational and creative use. The main idea was to build an electronic music controller, including sensors and a sensor interface, on a “10 euro” budget. We have experimented with turning commercially available USB game controllers into generic sensor interfaces, and making sensors from cheap conductive materials such as latex, ink, porous materials, and video tape. Our prototype controller, the CheapStick, is comparable to interfaces built with commercially available sensors and interfaces, but at a fraction of the price. },
Address = {Berlin Heidelberg},
Author = {Jensenius, Alexander Refsum and Koehly, Rodolphe and Wanderley, Marcelo M.},
Booktitle = {CMMR 2005},
Date-Added = {2007-03-31 16:09:23 +0200},
Date-Modified = {2012-08-15 14:01:06 +0000},
Doi = {10.1007/11751069_11},
Editor = {Kronland-Martinet, R. and Voinier, T. and Ystad, S.},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/Jensenius/Jensenius_2006a.pdf},
Pages = {123--129},
Publisher = {Springer},
Series = {LNCS},
Title = {Building Low-Cost Music Controllers},
Url = {http://www.duo.uio.no/sok/work.html?WORKID=137615&fid=85970},
Volume = {3902},
Webpdf = {http://www.duo.uio.no/sok/work.html?WORKID=137615&fid=85970},
Year = {2006},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA2YS5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDA2YS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxC3BotFYAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAwaLDSAAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAwNmEucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADYAYQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAwNmEucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://www.duo.uio.no/sok/work.html?WORKID=137615&fid=85970},
Bdsk-Url-2 = {http://dx.doi.org/10.1007/11751069_11}}

@incollection{Godoy:2006,
Abstract = {Both musicians and non-musicians can often be seen making sound-producing gestures in the air without touching any real instruments. Such ``air playing'' can be regarded as an expression of how people perceive and imagine music, and studying the relationships between these gestures and sound might contribute to our knowledge of how gestures help structure our experience of music.},
Address = {Berlin Heidelberg},
Annote = {GW2005, LNAI 3881},
Author = {God{\o}y, Rolf Inge and Haga, Egil and Jensenius, Alexander Refsum},
Booktitle = {Gesture in Human-Computer Interaction and Simulation, 6th International Gesture Workshop},
Date-Added = {2006-02-16 20:01:04 -0500},
Date-Modified = {2012-08-15 14:01:13 +0000},
Editor = {Gibet, Sylvie and Courty, Nicolas and Kamp, Jean-Fran{\c c}ois},
Keywords = {Musical Gestures},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/God%C3%B8y/God%C3%B8y_2006a.pdf},
Pages = {256--267},
Publisher = {Springer},
Series = {LNAI},
Title = {Playing `Air Instruments': Mimicry of Sound-producing Gestures by Novices and Experts},
Url = {http://www.springerlink.com/content/w67h353315qt1152/},
Volume = {3881},
Webpdf = {http://urn.nb.no/URN:NBN:no-29814},
Year = {2006},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YW8QHQAuAC4ALwBQAEQARgBzAC8ARwBvAGQA+AB5AC8ARwBvAGQA+AB5AF8AMgAwADAANgBhAC4AcABkAGbSFwsYGVdOUy5kYXRhTxEBpgAAAAABpgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAACMqDD0dvZL95XzIwMDZhLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKkoPBn1LmAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAVHb2S/eQAAEAAIAADKPHEDAAAAEQAIAADBn0TWAAAAAQAUAAjKgwAG0+oABW3NAAUAyAAAvzEAAgBFTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBHb2S/eToAR29kv3lfMjAwNmEucGRmAAAOACAADwBHAG8AZAD4AHkAXwAyADAAMAA2AGEALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADVVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9Hb2TDuHkvR29kw7h5XzIwMDZhLnBkZgAAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgDLANAA2AKCAoQCiQKUAp0CqwKvArYCvwLEAtEC1ALmAukC7gAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALw},
Bdsk-Url-1 = {http://www.springerlink.com/content/w67h353315qt1152/},
Bdsk-Url-2 = {http://urn.nb.no/URN:NBN:no-29814}}

@incollection{Jensenius:2010,
Abstract = {We experience and understand the world, including music, through body movement---when we hear something, we are able to make sense of it by relating it to our body movements, or form an image in our minds of body movements. Musical Gestures is a collection of essays that explore the relationship between sound and movement. It takes an interdisciplinary approach to the fundamental issues of this subject, drawing on ideas, theories and methods from disciplines such as musicology, music perception, human movement science, cognitive psychology, and computer science.},
Address = {New York},
Author = {Jensenius, Alexander Refsum and Wanderley, Marcelo M. and God{\o}y, Rolf Inge and Leman, Marc},
Booktitle = {Musical gestures: Sound, movement, and meaning},
Date-Added = {2010-01-31 14:27:05 +0100},
Date-Modified = {2012-12-05 14:02:25 +0000},
Editor = {God{\o}y, Rolf Inge and Leman, Marc},
Pages = {12--35},
Publisher = {Routledge},
Title = {Musical gestures: Concepts and methods in research},
Url = {http://books.google.no/books?id=bLhHWo_hT6QC&lpg=PA12&ots=y-ndfPuxsQ&dq=Musical%20Gestures%3A%20concepts%20and%20methods%20in%20research&pg=PA12#v=onepage&q=Musical%20Gestures:%20concepts%20and%20methods%20in%20research&f=false},
Year = {2010},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEwYy5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDEwYy5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxfvGuyEJAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAxrsE6QAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMGMucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADAAYwAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMGMucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-File-2 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEwZS5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDEwZS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxgbJ0boDAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAydGd4wAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMGUucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADAAZQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMGUucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://books.google.no/books?id=bLhHWo_hT6QC&lpg=PA12&ots=y-ndfPuxsQ&dq=Musical%20Gestures%3A%20concepts%20and%20methods%20in%20research&pg=PA12#v=onepage&q=Musical%20Gestures:%20concepts%20and%20methods%20in%20research&f=false}}

@incollection{Nymoen:2012,
Abstract = {This paper presents an experiment on sound tracing, meaning an experiment on how people relate motion to sound. 38 participants were presented with 18 short sounds, and instructed to move their hands in the air while acting as though the sound was created by their hand motion. The hand motion of the participants was recorded, and has been analyzed using statistical tests, comparing results between different sounds, between different subjects, and between different sound classes. We have identified several relationships between sound and motion which are present in the majority of the subjects. A clear distinction was found in onset acceleration for motion to sounds with an impulsive dynamic envelope compared to non-impulsive sounds. Furthermore, vertical movement has been shown to be related to sound frequency, both in terms of spectral centroid and pitch. Moreover, a significantly higher amount of overall acceleration was observed for non-pitched sounds as compared to pitched sounds.},
Address = {Berlin Heidelberg},
Author = {Nymoen, Kristian and God{\o}y, Rolf Inge and Torresen, Jim and Jensenius, Alexander Refsum},
Booktitle = {Speech, Sound and Music Processing: Embracing Research in India},
Date-Added = {2012-01-12 18:42:31 +0000},
Date-Modified = {2012-08-17 13:36:31 +0000},
Doi = {10.1007/978-3-642-31980-8_11},
Editor = {Ystad, S{\o}lvi and Aramaki, Mitsuko and Kronland-Martinet, Richard and Jensen, Kristoffer and Mohanty, Sanghamitra},
Pages = {120--145},
Publisher = {Springer},
Series = {Lecture Notes in Computer Science},
Title = {A Statistical Approach to Analyzing Sound Tracings},
Url = {http://dx.doi.org/10.1007/978-3-642-31980-8_11},
Volume = {7172},
Year = {2012},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QHi4uL1BERnMvTnltb2VuL055bW9lbl8yMDEyLnBkZtIXCxgZV05TLmRhdGFPEQGkAAAAAAGkAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAANC3wPTnltb2VuXzIwMTIucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKaSJswbPvkAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAABk55bW9lbgAQAAgAAMo8cQMAAAARAAgAAMwbItkAAAABABQADQt8AAbT6gAFbc0ABQDIAAC/MQACAEZNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AE55bW9lbjoATnltb2VuXzIwMTIucGRmAA4AIAAPAE4AeQBtAG8AZQBuAF8AMgAwADEAMgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIANFVzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL055bW9lbi9OeW1vZW5fMjAxMi5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCvALQAvAJkAmYCawJ2An8CjQKRApgCoQKmArMCtgLIAssC0AAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALS},
Bdsk-Url-1 = {http://dx.doi.org/10.1007/978-3-642-31980-8_11}}

@incollection{Jensenius:2012g,
Abstract = {We present the results of a pilot study on how micromovements may be used in an interactive dance/music performance. Micromovements are subtle body movements that cannot be easily seen by the human eye. Using an infrared marker-based motion capture system we have explored micromovements through 15$\times$10 minutes long observation studies of ourselves standing still or moving slowly. The results from these studies show that there are both consistent and inconsistent movement patterns to be found at various temporal levels. Experimentation with three different types of realtime sonification shows artistic potential in using micromovements in dance and music performance.},
Address = {Berlin},
Annote = {Arts and Technology, Second International Conference, ArtsIT 2011, Esbjerg, Denmark, December 7-8, 2011, Revised Selected Papers
Proceedings of Second International ICST Conference on Arts and Technology

http://www.springer.com/computer/information+systems+and+applications/book/978-3-642-33328-6},
Author = {Jensenius, Alexander Refsum and Bjerkestrand, Kari Anne Vadstensvik},
Booktitle = {Arts and Technology, Revised Selected Papers},
Date-Added = {2011-12-14 20:40:58 +0000},
Date-Modified = {2012-10-03 09:54:21 +0000},
Editor = {Brooks, Anthony L.},
Pages = {100--107},
Publisher = {Springer},
Series = {LNICST},
Title = {Exploring micromovements with motion capture and sonification},
Url = {http://www.springerlink.com/content/j04650123p105646/},
Volume = {101},
Year = {2012},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEyZi5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDEyZi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEJ64PMb2oJAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAzG9N6QAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMmYucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADIAZgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMmYucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://www.springer.com/computer/information+systems+and+applications/book/978-3-642-33328-6},
Bdsk-Url-2 = {http://www.springerlink.com/content/j04650123p105646/fulltext.pdf}}

@incollection{Jensenius:2012c,
Abstract = {The chapter starts by discussing the importance of body movement in both music performance and perception, and argues that for future research in the field it is important to develop solutions for being able to stream and store music-related movement data alongside other types of musical information. This is followed by a suggestion for a multilayered approach to structuring movement data, where each of the layers represent a separate and consistent subset of information. Finally, examples of two prototype implementations are presented: a setup for storing GDIF-data into SDIF-files, and an example of how GDIF-based OSC streams can allow for more flexible and meaningful mapping from controller to sound engine.},
Address = {Hershey, PA},
Author = {Jensenius, Alexander Refsum},
Booktitle = {Structuring Music through Markup Language: Designs and Architectures},
Date-Added = {2011-04-17 10:55:56 +0200},
Date-Modified = {2012-10-15 10:20:05 +0000},
Editor = {Steyn, Jacques},
Pages = {135--155},
Publisher = {IGI},
Title = {Structuring music-related movements},
Url = {http://musicmarkup.info/book/index.html},
Year = {2012},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEyZy5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDEyZy5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFhPNbMxuJOAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAzMbUPgAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMmcucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADIAZwAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMmcucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://musicmarkup.info/book/index.html}}

@inproceedings{Jensenius:2004a,
  Author        = {Jensenius, Alexander Refsum},
  Booktitle     = {Proceedings of the Leeds International Music Technology Education Conference},
  Date-Added    = {2005-02-25 16:34:49 -0500},
  Date-Modified = {2012-08-15 13:54:11 +0000},
  Title         = {The Interactive Mass: An Environment for Multimedia Improvisation},
  Year          = {2004}}

@inproceedings{Godoy:2004,
Address = {University of Oslo},
Author = {God{\o}y, Rolf Inge and Haga, Egil and Jensenius, Alexander Refsum},
Booktitle = {ConGAS WG1 Seminar: Basic Issues of Gesture and Musical Sound},
Date-Modified = {2007-04-16 11:57:43 +0200},
Keywords = {Musical Gestures},
Title = {Motormimetic sketching and the novice-expert continuum},
Year = {2004},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YW8QHQAuAC4ALwBQAEQARgBzAC8ARwBvAGQA+AB5AC8ARwBvAGQA+AB5AF8AMgAwADAANABhAC4AcABkAGbSFwsYGVdOUy5kYXRhTxEBpgAAAAABpgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAACMqDD0dvZL95XzIwMDRhLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKknvKw53xAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAVHb2S/eQAAEAAIAADKPHEDAAAAEQAIAADKw4HRAAAAAQAUAAjKgwAG0+oABW3NAAUAyAAAvzEAAgBFTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBHb2S/eToAR29kv3lfMjAwNGEucGRmAAAOACAADwBHAG8AZAD4AHkAXwAyADAAMAA0AGEALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADVVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9Hb2TDuHkvR29kw7h5XzIwMDRhLnBkZgAAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgDLANAA2AKCAoQCiQKUAp0CqwKvArYCvwLEAtEC1ALmAukC7gAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALw}}

@inproceedings{Jensenius:2005a,
Address = {Pisa, Italy},
Author = {Jensenius, Alexander Refsum and Koehly, Rodolphe and Wanderley, Marcelo M.},
Booktitle = {Proceedings of the 3rd International Symposium on Computer Music Modelling and Retrieval},
Date-Added = {2005-05-08 21:49:13 -0400},
Date-Modified = {2012-08-15 13:55:43 +0000},
Pages = {252--256},
Title = {Building Low-Cost Music Controllers},
Year = {2005},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA1Yi5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDA1Yi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMw9W/Pl8QAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAvz5C8AAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAwNWIucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADUAYgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAwNWIucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=}}

@inproceedings{Casciato:2005,
  Address       = {Porto, Portugal},
  Author        = {Casciato, Carmine and Jensenius, Alexander Refsum and Wanderley, Marcelo M.},
  Booktitle     = {Proceedings of ESCOM 2005 Performance Matters! Conference},
  Date-Added    = {2005-05-08 21:50:03 -0400},
  Date-Modified = {2007-07-07 19:33:27 +0200},
  Local-Url     = {file://localhost/Users/alexanje/Reference/Bibdesk/Casciato/Casciato_2005.pdf},
  Title         = {Studying Free Dance Movement to Music},
  Year          = {2005},
  Bdsk-File-1   = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QIi4uL1BERnMvQ2FzY2lhdG8vQ2FzY2lhdG9fMjAwNS5wZGbSFwsYGVdOUy5kYXRhTxEBsgAAAAABsgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAABvfYEUNhc2NpYXRvXzIwMDUucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAG99nBotIkAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAhDYXNjaWF0bwAQAAgAAMo8cQMAAAARAAgAAMGixBQAAAABABQABvfYAAbT6gAFbc0ABQDIAAC/MQACAEpNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AENhc2NpYXRvOgBDYXNjaWF0b18yMDA1LnBkZgAOACQAEQBDAGEAcwBjAGkAYQB0AG8AXwAyADAAMAA1AC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgA4VXNlcnMvYWxleGFuamUvUmVmZXJlbmNlL1BERnMvQ2FzY2lhdG8vQ2FzY2lhdG9fMjAwNS5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCzALgAwAJ2AngCfQKIApECnwKjAqoCswK4AsUCyALaAt0C4gAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALk}}

@inproceedings{Godoy:2005,
Abstract = {Both musicians and non-musicians can often be seen making sound-producing gestures in the air without touching any real instruments. Such ``air playing'' can be regarded as an expression of how people perceive and imagine music, and studying the relationships between these gestures and sound might contribute to our knowledge of how gestures help structure our experience of music.},
Address = {Vannes, France},
Author = {God{\o}y, Rolf Inge and Haga, Egil and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the 6th International Gesture Workshop},
Date-Added = {2005-02-25 16:23:51 -0500},
Date-Modified = {2012-08-15 13:56:15 +0000},
Title = {Playing `Air Instruments': Mimicry of Sound-producing Gestures by Novices and Experts},
Year = {2005}}

@inproceedings{Godoy:2006b,
Abstract = {This is an exploration of listeners association of gestures with musical sounds. The subjects listen to sounds that have been chosen for various salient features, and the tracing movements made by the subjects are recorded and subsequently compared in view of common features in the tracings. },
Address = {Leeds},
Author = {God{\o}y, Rolf Inge and Haga, Egil and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the COST287-ConGAS 2nd International Symposium on Gesture Interfaces for Multimedia Systems},
Date-Added = {2007-01-12 10:04:00 +0100},
Date-Modified = {2011-12-30 14:09:37 +0000},
Editor = {Ng, Kia},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/God%C3%B8y/God%C3%B8y_2006b.pdf},
Pages = {27--33},
Title = {Exploring Music-Related Gestures by Sound-Tracing -- A Preliminary Study},
Url = {http://www.duo.uio.no/sok/work.html?WORKID=148115&fid=89150},
Year = {2006},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YW8QHQAuAC4ALwBQAEQARgBzAC8ARwBvAGQA+AB5AC8ARwBvAGQA+AB5AF8AMgAwADAANgBiAC4AcABkAGbSFwsYGVdOUy5kYXRhTxEBpgAAAAABpgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAACMqDD0dvZL95XzIwMDZiLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKkqvB0WaEAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAVHb2S/eQAAEAAIAADKPHEDAAAAEQAIAADB0Vh0AAAAAQAUAAjKgwAG0+oABW3NAAUAyAAAvzEAAgBFTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBHb2S/eToAR29kv3lfMjAwNmIucGRmAAAOACAADwBHAG8AZAD4AHkAXwAyADAAMAA2AGIALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADVVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9Hb2TDuHkvR29kw7h5XzIwMDZiLnBkZgAAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgDLANAA2AKCAoQCiQKUAp0CqwKvArYCvwLEAtEC1ALmAukC7gAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALw},
Bdsk-Url-1 = {http://www.duo.uio.no/sok/work.html?WORKID=148115&fid=89150}}

@inproceedings{Hansen:2006,
Abstract = {This paper describes the concept and realization of The Drum Pants, a pair of pants with sensors and control switches, allowing the performer to play and record a virtual drum set or percussion rack by hitting the thighs and waist with the hands. The main idea is to make a virtual percussion instrument with a high level of bodily control and which permits new visual performance possibilities.},
Address = {Pite{\aa}, Sweden},
Author = {Hansen, S{\o}ren Holme and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of Audio Mostly 2006 -- A Conference on Sound in Games},
Date-Added = {2006-07-10 16:09:57 +0200},
Date-Modified = {2012-08-15 13:56:50 +0000},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/Hansen/Hansen_2006.pdf},
Pages = {60--63},
Read = {Yes},
Title = {Drum Pants},
Url = {http://www.duo.uio.no/sok/work.html?WORKID=140676},
Webpdf = {http://www.duo.uio.no/sok/work.html?WORKID=140676},
Year = {2006},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QHi4uL1BERnMvSGFuc2VuL0hhbnNlbl8yMDA2LnBkZtIXCxgZV05TLmRhdGFPEQGkAAAAAAGkAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAMrzgPSGFuc2VuXzIwMDYucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAyvOcG4LPMAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAABkhhbnNlbgAQAAgAAMo8cQMAAAARAAgAAMG4HuMAAAABABQADK84AAbT6gAFbc0ABQDIAAC/MQACAEZNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AEhhbnNlbjoASGFuc2VuXzIwMDYucGRmAA4AIAAPAEgAYQBuAHMAZQBuAF8AMgAwADAANgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIANFVzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0hhbnNlbi9IYW5zZW5fMjAwNi5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCvALQAvAJkAmYCawJ2An8CjQKRApgCoQKmArMCtgLIAssC0AAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALS},
Bdsk-Url-1 = {http://www.duo.uio.no/sok/work.html?WORKID=140676}}

@inproceedings{Marshall:2006,
Abstract = {This paper presents our current approach in using a Polhemus Liberty electromagnetic tracker for controlling spatialization in a performance setup for small ensemble. We are developing a Gesture Description Interchange Format (GDIF) to standardize the way gesture-related information is stored and shared in a networked computer setup. Examples are given of our current GDIF namespace, the gesture tracking subsystem developed to use this namespace and patches written to control spatialization and mapping using gesture data. },
Address = {New Orleans, LA},
Annote = {ViMiC and setup
GDIF: Polhemus implementation},
Author = {Marshall, Mark T. and Peters, Nils and Jensenius, Alexander Refsum and Boissinot, Julien and Wanderley, Marcelo M. and Braasch, Jonas},
Booktitle = {Proceedings of the International Computer Music Conference},
Date-Added = {2006-06-21 09:53:10 +0200},
Date-Modified = {2011-12-08 20:35:41 +0000},
Keywords = {Musical Gestures; Mapping},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/Marshall/Marshall_2006.pdf},
Pages = {360--366},
Publisher = {San Francisco: ICMA},
Title = {On the Development of a System for Gesture Control of Spatialization},
Url = {http://urn.nb.no/URN:NBN:no-21799},
Webpdf = {http://urn.nb.no/URN:NBN:no-21799},
Year = {2006},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QIi4uL1BERnMvTWFyc2hhbGwvTWFyc2hhbGxfMjAwNi5wZGbSFwsYGVdOUy5kYXRhTxEBsgAAAAABsgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADPLPEU1hcnNoYWxsXzIwMDYucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAM82PBotFxAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAhNYXJzaGFsbAAQAAgAAMo8cQMAAAARAAgAAMGiw2EAAAABABQADPLPAAbT6gAFbc0ABQDIAAC/MQACAEpNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AE1hcnNoYWxsOgBNYXJzaGFsbF8yMDA2LnBkZgAOACQAEQBNAGEAcgBzAGgAYQBsAGwAXwAyADAAMAA2AC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgA4VXNlcnMvYWxleGFuamUvUmVmZXJlbmNlL1BERnMvTWFyc2hhbGwvTWFyc2hhbGxfMjAwNi5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCzALgAwAJ2AngCfQKIApECnwKjAqoCswK4AsUCyALaAt0C4gAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALk},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-21799}}

@inproceedings{Jensenius:2006c,
Abstract = {Navigating hours of video material is often time-consuming, and traditional keyframe displays are not particularly useful when studying single-shot studio recordings of music-related movement. This paper presents the idea of motiongrams and how we use such displays in our studies of dancers' free movements to music.},
Address = {New Orleans, LA},
Author = {Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the International Computer Music Conference},
Date-Added = {2006-06-21 09:52:21 +0200},
Date-Modified = {2013-01-15 19:10:19 +0000},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/Jensenius/Jensenius_2006.pdf},
Pages = {499--502},
Title = {Using Motiongrams in the Study of Musical Gestures},
Url = {http://urn.nb.no/URN:NBN:no-21798},
Webpdf = {http://urn.nb.no/URN:NBN:no-21798},
Year = {2006},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJC4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA2LnBkZtIXCxgZV05TLmRhdGFPEQG6AAAAAAG6AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAMwroSSmVuc2VuaXVzXzIwMDYucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzEKMGi0XsAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAACUplbnNlbml1cwAAEAAIAADKPHEDAAAAEQAIAADBosNrAAAAAQAUAAzCugAG0+oABW3NAAUAyAAAvzEAAgBMTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBKZW5zZW5pdXM6AEplbnNlbml1c18yMDA2LnBkZgAOACYAEgBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADYALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADpVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9KZW5zZW5pdXMvSmVuc2VuaXVzXzIwMDYucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtQC6AMICgAKCAocCkgKbAqkCrQK0Ar0CwgLPAtIC5ALnAuwAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC7g==},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-21798}}

@inproceedings{Jensenius:2006b,
  address       = {Manchester},
  author        = {Jensenius, Alexander Refsum},
  booktitle     = {Second International Conference on Music and Gesture},
  date-added    = {2006-05-19 18:05:07 -0400},
  date-modified = {2012-08-15 14:02:03 +0000},
  keywords      = {Musical Gestures},
  title         = {Developing Tools for Analysing Musical Gestures},
  year          = {2006}}

@inproceedings{Casciato:2006,
Address = {Montreal},
Author = {Casciato, Carmine and Jensenius, Alexander Refsum and Wanderley, Marcelo M.},
Booktitle = {ACFAS: L'interdisciplinarit{\'e} dans les sciences et technologies de la musique},
Date-Added = {2006-05-19 17:57:52 -0400},
Date-Modified = {2012-08-15 13:57:20 +0000},
Internal-Note = {Title reads "La mouvement" -- "mouvement" is masculine in French ("Le mouvement"); verify title against the original conference programme before correcting.},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/Casciato/Casciato_2006.pdf},
Title = {La mouvement libre de danse et la musique},
Year = {2006},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QIi4uL1BERnMvQ2FzY2lhdG8vQ2FzY2lhdG9fMjAwNi5wZGbSFwsYGVdOUy5kYXRhTxEBsgAAAAABsgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAABvfYEUNhc2NpYXRvXzIwMDYucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAG+KPAkX4pAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAhDYXNjaWF0bwAQAAgAAMo8cQMAAAARAAgAAMCRYgkAAAABABQABvfYAAbT6gAFbc0ABQDIAAC/MQACAEpNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AENhc2NpYXRvOgBDYXNjaWF0b18yMDA2LnBkZgAOACQAEQBDAGEAcwBjAGkAYQB0AG8AXwAyADAAMAA2AC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgA4VXNlcnMvYWxleGFuamUvUmVmZXJlbmNlL1BERnMvQ2FzY2lhdG8vQ2FzY2lhdG9fMjAwNi5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCzALgAwAJ2AngCfQKIApECnwKjAqoCswK4AsUCyALaAt0C4gAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALk}}

@inproceedings{Jensenius:2006,
Abstract = {This paper presents our need for a Gesture Description Interchange Format (GDIF) for storing, retrieving and sharing information about music-related gestures. Ideally, it should be possible to store all sorts of data from various commercial and custom made controllers, motion capture and computer vision systems, as well as results from different types of gesture analysis, in a coherent and consistent way. This would make it possible to use the information with different software, platforms and devices, and also allow for sharing data between research institutions. We present some of the data types that should be included, and discuss issues which need to be resolved.
},
Address = {Paris},
Annote = {ISBN: 2-84426-314-3
ISBN: 978-2-84426-314-8

Eds: Schnell, Norbert and Bevilacqua, Fr{\'e}d{\'e}ric and Lyons, Michael and Tanaka, Atau

Proceedings of the International Conference on New Interfaces for Musical Expression},
Author = {Jensenius, Alexander Refsum and Kvifte, Tellef and God{\o}y, Rolf Inge},
Booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
Date-Added = {2006-02-27 15:25:42 -0500},
Date-Modified = {2012-08-09 16:03:01 +0000},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/Jensenius/Jensenius_2006b.pdf},
Pages = {176--179},
Publisher = {IRCAM},
Title = {Towards a Gesture Description Interchange Format},
Url = {http://urn.nb.no/URN:NBN:no-21796},
Webpdf = {http://urn.nb.no/URN:NBN:no-21796},
Year = {2006},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA2Yi5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDA2Yi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxC/BotFzAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAwaLDYwAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAwNmIucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADYAYgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAwNmIucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-21796}}

@inproceedings{Kvifte:2006,
Address = {Paris},
Annote = {ISBN: 2-84426-314-3
ISBN: 978-2-84426-314-8},
Author = {Kvifte, Tellef and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
Date-Added = {2006-01-25 19:28:11 +0100},
Date-Modified = {2011-12-08 15:10:15 +0000},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/Kvifte/Kvifte_2006.pdf},
Pages = {220--225},
Publisher = {IRCAM},
Title = {Towards a Coherent Terminology and Model of Instrument Description and Design},
Url = {http://urn.nb.no/URN:NBN:no-21795},
Webpdf = {http://urn.nb.no/URN:NBN:no-21795},
Year = {2006},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QHi4uL1BERnMvS3ZpZnRlL0t2aWZ0ZV8yMDA2LnBkZtIXCxgZV05TLmRhdGFPEQGkAAAAAAGkAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAM39cPS3ZpZnRlXzIwMDYucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzgs8Gi0X8AAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAABkt2aWZ0ZQAQAAgAAMo8cQMAAAARAAgAAMGiw28AAAABABQADN/XAAbT6gAFbc0ABQDIAAC/MQACAEZNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AEt2aWZ0ZToAS3ZpZnRlXzIwMDYucGRmAA4AIAAPAEsAdgBpAGYAdABlAF8AMgAwADAANgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIANFVzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0t2aWZ0ZS9LdmlmdGVfMjAwNi5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCvALQAvAJkAmYCawJ2An8CjQKRApgCoQKmArMCtgLIAssC0AAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALS},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-21795}}

@inproceedings{Torresen:2007c,
Abstract = {This paper presents a comparison of different configurations of a wireless sensor system for capturing human motion. The systems consist of sensor elements which wirelessly transfers motion data to a receiver element. The sensor elements consist of a microcontroller, accelerometer(s) and a radio transceiver. The receiver element consists of a radio receiver connected through a microcontroller to a computer for real time sound synthesis. The wireless transmission between the sensor elements and the receiver element is based on the low rate IEEE 802.15.4/ZigBee standard. A configuration with several accelerometers connected by wire to a wireless sensor element is compared to using multiple wireless sensor elements with only one accelerometer in each. The study shows that it would be feasable to connect 5-6 accelerometers in the given setups. Sensor data processing can be done in either the receiver element or in the sensor element. For various reasons it can be reasonable to implement some sensor data processing in the sensor element. The paper also looks at how much time that typically would be needed for a simple pre-processing task.},
Author = {T{\o}rresen, Jim and Norendal, J{\o}rgen and Glette, Kyrre Harald and Renton, Eirik and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the {IEEE} Workshop on Design \& Diagnostics of Electronic Circuits \& Systems ({DDECS})},
Date-Added = {2012-08-17 07:21:22 +0000},
Date-Modified = {2012-08-17 08:00:33 +0000},
Internal-Note = {Title and publisher previously contained text fused from another record ("Establishing a New Course in Reconfigurable Logic System Design" / "Association for Computing Machinery (ACM)"); cleaned to match the abstract and the DDECS venue -- verify against the published proceedings.},
Pages = {227--230},
Publisher = {IEEE Computer Society},
Title = {Wireless Sensor Data Collection based on {ZigBee} Communication},
Year = {2007}}

@inproceedings{Torresen:2007e,
Author = {T{\o}rresen, Jim and L{\o}vland, Tor Arne and Mirmotahari, Omid and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the {IEEE} Workshop on Design \& Diagnostics of Electronic Circuits \& Systems ({DDECS})},
Date-Added = {2012-08-17 07:21:22 +0000},
Date-Modified = {2012-08-17 08:01:33 +0000},
Internal-Note = {Title previously ended with text fused from another record ("Intelligente systemer for robotikk og musikk"); the Norwegian tail has been dropped -- verify the remaining title against the DDECS 2007 proceedings.},
Pages = {1--4},
Publisher = {IEEE Computer Society},
Title = {Parts Obsolescence Challenges for the Electronics Industry},
Year = {2007}}

@inproceedings{Jensenius:2007c,
Abstract = {This paper summarises a panel discussion at the 2007 International Computer Music Conference on movement and gesture data formats, presents some of the formats currently in development in the computer music community, and outlines some of the challenges involved in future development.},
Address = {Grenoble},
Annote = {Enactive / 07
Proceedings of the 4th International Conference on Enactive Interfaces 2007
ISSN 1958-5497
Published and Edited by:
Association ACROE
INPG, 46 av. F{\'e}lix Viallet
38000 Grenoble
France
},
Author = {Jensenius, Alexander Refsum and Castagn{\'e}, Nicolas and Camurri, Antonio and Maestre, Esteban and Malloch, Joseph and McGilvray, Douglas},
Booktitle = {Proceedings of the 4th International Conference on Enactive Interfaces},
Date-Added = {2007-10-16 14:26:05 +0200},
Date-Modified = {2012-08-15 14:02:35 +0000},
Editor = {Luciani, Annie and Cadoz, Claude},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/Jensenius/Jensenius_2007c.pdf},
Pages = {125--128},
Publisher = {ACROE},
Title = {A Summary of Formats for Streaming and Storing Music-Related Movement and Gesture Data},
Url = {http://urn.nb.no/URN:NBN:no-20629},
Webpdf = {http://urn.nb.no/URN:NBN:no-20629},
Year = {2007},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA3Yy5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDA3Yy5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxDzDOYOhAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAwzlngQAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAwN2MucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADcAYwAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAwN2MucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-20629}}

@inproceedings{Jensenius:2007b,
Abstract = {The last decade has seen the development of standards for music notation (MusicXML), audio analysis (SDIF), and sound control (OSC), but there are no widespread standards, nor structured approaches, for handling music-related movement, action and gesture data. This panel will address the needs for such formats and standards in the computer music community, and discuss possible directions for future development.},
Address = {Copenhagen},
Author = {Jensenius, Alexander Refsum and Camurri, Antonio and Castagne, Nicolas and Maestre, Esteban and Malloch, Joseph and McGilvray, Douglas and Schwarz, Diemo and Wright, Matthew},
Booktitle = {Proceedings of the International Computer Music Conference},
Date-Added = {2007-05-05 16:56:44 +0200},
Date-Modified = {2012-08-15 14:02:25 +0000},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/Jensenius/Jensenius_2007a.pdf},
Pages = {13--16},
Title = {Panel: the Need of Formats for Streaming and Storing Music-Related Movement and Gesture Data},
Url = {http://urn.nb.no/URN:NBN:no-21797},
Webpdf = {http://urn.nb.no/URN:NBN:no-21797},
Year = {2007},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA3YS5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDA3YS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxDvCYmM3AAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAwmJHFwAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAwN2EucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADcAYQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAwN2EucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-21797}}

@inproceedings{Place:2008a,
Abstract = {Fundamental to the development of musical or artistic creative work is the ability to transform raw materials. This ability implies the facility to master many facets of the material, and to shape it with plasticity. Computer music environments typically provide points of control to manipulate material by supplying parameters with controllable values. This capability to control the values of parameters is inadequate for many artistic endeavors, and does not reflect the analogous tools and methods of artists working with physical materials.

Rather than viewing parameters in computer-based systems as single points of control, the authors posit that parameters must become more multifaceted and dynamic in order to serve the needs of artists. The authors propose an expanded notion of how to work with parameters in computer-centric environments for time-based art. A proposed partial solution to this problem is to give parameters additional properties that define their behavior. An example implementation of these ideas is presented in Jamoma. },
Address = {Belfast},
Annote = {Authors: Timothy Place, Trond Lossius, Alexander Refsum Jensenius, Nils Peters
},
Author = {Place, Timothy and Lossius, Trond and Jensenius, Alexander Refsum and Peters, Nils},
Booktitle = {Proceedings of the International Computer Music Conference},
Date-Added = {2008-06-12 21:55:13 +0200},
Date-Modified = {2012-08-15 13:58:07 +0000},
Pages = {233--236},
Title = {Flexible Control of Composite Parameters in {Max/MSP}},
Url = {http://urn.nb.no/URN:NBN:no-20631},
Webpdf = {http://urn.nb.no/URN:NBN:no-20631},
Year = {2008},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QHS4uL1BERnMvUGxhY2UvUGxhY2VfMjAwOGIucGRm0hcLGBlXTlMuZGF0YU8RAaQAAAAAAaQAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMo8jSNIKwAAAA0c7g9QbGFjZV8yMDA4Yi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADR05xHc3PwAAAAAAAAAAAAEAAwAACSAAAAAAAAAAAAAAAAAAAAAFUGxhY2UAABAACAAAyjxxAwAAABEACAAAxHcbHwAAAAEAFAANHO4ABtPqAAVtzQAFAMgAAL8xAAIARU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoAUGxhY2U6AFBsYWNlXzIwMDhiLnBkZgAADgAgAA8AUABsAGEAYwBlAF8AMgAwADAAOABiAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAzVXNlcnMvYWxleGFuamUvUmVmZXJlbmNlL1BERnMvUGxhY2UvUGxhY2VfMjAwOGIucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOAK4AswC7AmMCZQJqAnUCfgKMApAClwKgAqUCsgK1AscCygLPAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAtE=},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-20631}}

@inproceedings{Place:2008,
Abstract = {An approach for creating structured Open Sound Control (OSC) messages by separating the addressing of node values and node properties is suggested. This includes a method for querying values and properties. As a result, it is possible to address complex nodes as classes inside of more complex tree structures using an OSC namespace. This is particularly useful for creating flexible communication in modular systems. A prototype implementation is presented and discussed.},
Address = {Genova, Italy},
Annote = {ISBN 978-88-901344-6-3},
Author = {Place, Timothy and Lossius, Trond and Jensenius, Alexander Refsum and Peters, Nils and Baltazar, Pascal},
Booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
Date-Added = {2008-06-12 21:53:07 +0200},
Date-Modified = {2012-08-15 13:58:00 +0000},
Pages = {181--184},
Title = {Addressing Classes by Differentiating Values and Properties in {OSC}},
Url = {http://urn.nb.no/URN:NBN:no-28145},
Webpdf = {http://urn.nb.no/URN:NBN:no-28145},
Year = {2008},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QHC4uL1BERnMvUGxhY2UvUGxhY2VfMjAwOC5wZGbSFwsYGVdOUy5kYXRhTxEBngAAAAABngACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADRzuDlBsYWNlXzIwMDgucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANHTfEdzc7AAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAVQbGFjZQAAEAAIAADKPHEDAAAAEQAIAADEdxsbAAAAAQAUAA0c7gAG0+oABW3NAAUAyAAAvzEAAgBETWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBQbGFjZToAUGxhY2VfMjAwOC5wZGYADgAeAA4AUABsAGEAYwBlAF8AMgAwADAAOAAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAMlVzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL1BsYWNlL1BsYWNlXzIwMDgucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4ArQCyALoCXAJeAmMCbgJ3AoUCiQKQApkCngKrAq4CwALDAsgAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAACyg==},
Bdsk-File-2 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QHS4uL1BERnMvUGxhY2UvUGxhY2VfMjAwOGEucGRm0hcLGBlXTlMuZGF0YU8RAaQAAAAAAaQAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMo8jSNIKwAAAA0c7g9QbGFjZV8yMDA4YS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADR04xHc3VQAAAAAAAAAAAAEAAwAACSAAAAAAAAAAAAAAAAAAAAAFUGxhY2UAABAACAAAyjxxAwAAABEACAAAxHcbNQAAAAEAFAANHO4ABtPqAAVtzQAFAMgAAL8xAAIARU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoAUGxhY2U6AFBsYWNlXzIwMDhhLnBkZgAADgAgAA8AUABsAGEAYwBlAF8AMgAwADAAOABhAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAzVXNlcnMvYWxleGFuamUvUmVmZXJlbmNlL1BERnMvUGxhY2UvUGxhY2VfMjAwOGEucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOAK4AswC7AmMCZQJqAnUCfgKMApAClwKgAqUCsgK1AscCygLPAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAtE=},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-28145}}

@inproceedings{Jensenius:2008b,
Abstract = {The paper presents some challenges faced in developing an experimental setup for studying coarticulation in music-related body movements. This has included solutions for storing and synchronising motion capture, biosensor and MIDI data, and related audio and video files. The implementation is based on a multilayered Gesture Description Interchange Format (GDIF) structure, written to Sound Description Interchange Format (SDIF) files using the graphical programming environment Max/MSP. },
Address = {Belfast},
Author = {Jensenius, Alexander Refsum and Nymoen, Kristian and God{\o}y, Rolf Inge},
Booktitle = {Proceedings of the International Computer Music Conference},
Date-Added = {2008-05-30 17:23:56 +0200},
Date-Modified = {2012-08-15 13:58:16 +0000},
Pages = {743--746},
Title = {A Multilayered {GDIF}-Based Setup for Studying Coarticulation in the Movements of Musicians},
Url = {http://urn.nb.no/URN:NBN:no-20632},
Webpdf = {http://urn.nb.no/URN:NBN:no-20632},
Year = {2008},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA4YS5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDA4YS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxJLEZeeqAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAxGXLigAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAwOGEucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADgAYQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAwOGEucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-20632}}

@inproceedings{Jensenius:2008a,
Abstract = {Mobile music technology opens many new opportunities in terms of location-aware systems, social interaction etc., but we should not forget that many challenges faced in ``immobile'' music technology research are also apparent in mobile computing. This paper presents an overview of some challenges related to the design of action-sound relationships and music-movement correspondences, and suggests how these can be studied and tested in mobile devices.},
Address = {Vienna},
Author = {Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the 5th International Mobile Music Workshop},
Date-Added = {2008-05-15 15:11:10 +0200},
Date-Modified = {2012-08-15 13:58:22 +0000},
Pages = {19--22},
Title = {Some Challenges Related to Music and Movement in Mobile Music Technology},
Url = {http://urn.nb.no/URN:NBN:no-21769},
Webpdf = {http://urn.nb.no/URN:NBN:no-21769},
Year = {2008},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJC4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA4LnBkZtIXCxgZV05TLmRhdGFPEQG6AAAAAAG6AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAMwroSSmVuc2VuaXVzXzIwMDgucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzEP8Qn1XwAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAACUplbnNlbml1cwAAEAAIAADKPHEDAAAAEQAIAADEJ7lcAAAAAQAUAAzCugAG0+oABW3NAAUAyAAAvzEAAgBMTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBKZW5zZW5pdXM6AEplbnNlbml1c18yMDA4LnBkZgAOACYAEgBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADgALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADpVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9KZW5zZW5pdXMvSmVuc2VuaXVzXzIwMDgucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtQC6AMICgAKCAocCkgKbAqkCrQK0Ar0CwgLPAtIC5ALnAuwAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC7g==},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-21769}}

@inproceedings{Torresen:2009c,
Author = {T{\o}rresen, Jim and Glette, Kyrre Harald and Jensenius, Alexander Refsum and Furuholmen, Marcus},
Booktitle = {Proceedings of the first Norwegian Artificial Intelligence Symposium},
Date-Added = {2012-08-17 07:21:22 +0000},
Date-Modified = {2012-08-17 08:01:41 +0000},
Pages = {1--4},
Publisher = {Tapir Akademisk Forlag},
Title = {Robotics and Intelligent Systems at {University of Oslo}},
Year = {2009}}

@inproceedings{Godoy:2009f,
Abstract = {We can see many and strong links between music and human body movement in musical performance, in dance, and in the variety of movements that people make in listening situations. There is evidence that sensations of human body movement are integral to music as such, and that sensations of movement are efficient carriers of information about style, genre, expression, and emotions. The challenge now in MIR is to develop means for the extraction and representation of movement-inducing cues from musical sound, as well as to develop possibilities for using body movement as input to search and navigation interfaces in MIR contexts.},
Address = {Kobe, Japan},
Annote = {October 26-30},
Author = {God{\o}y, Rolf Inge and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the 10th International Society for Music Information Retrieval Conference},
Date-Added = {2009-10-29 14:09:31 +0100},
Date-Modified = {2011-12-08 21:52:57 +0000},
Pages = {45--50},
Title = {Body Movement in Music Information Retrieval},
Url = {http://urn.nb.no/URN:NBN:no-23872},
Webpdf = {http://urn.nb.no/URN:NBN:no-23872},
Year = {2009},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YW8QHQAuAC4ALwBQAEQARgBzAC8ARwBvAGQA+AB5AC8ARwBvAGQA+AB5AF8AMgAwADAAOQBjAC4AcABkAGbSFwsYGVdOUy5kYXRhTxEBpgAAAAABpgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAACMqDD0dvZL95XzIwMDljLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALpQ7Ha3yIAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAVHb2S/eQAAEAAIAADKPHEDAAAAEQAIAADHa254AAAAAQAUAAjKgwAG0+oABW3NAAUAyAAAvzEAAgBFTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBHb2S/eToAR29kv3lfMjAwOWMucGRmAAAOACAADwBHAG8AZAD4AHkAXwAyADAAMAA5AGMALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADVVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9Hb2TDuHkvR29kw7h5XzIwMDljLnBkZgAAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgDLANAA2AKCAoQCiQKUAp0CqwKvArYCvwLEAtEC1ALmAukC7gAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALw},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-23872}}

@inproceedings{Jensenius:2009b,
Abstract = {1. Background: Carrying out research on music-related body movements involves working with different types of data (e.g. motion capture and sensor data) and media (i.e. audio, video), each having its own size, dimensions, speed etc. While each of the data types and media have their own analytical tools and representation techniques, we see the need for developing more tools that allow for studying all the data and media together in a synchronised manner. We have previously developed solutions for studying musical sound and movement using synchronised spectrograms of audio and motiongrams of video. Now as we have started using an infrared motion capture system in our research, we see the need for better techniques for visualising the multidimensional data sets (e.g. 50 markers x 3 dimensions x 100 Hz). While there are several techniques for doing this independently of audio and video, we are working on tools that integrate well with our displays of spectrograms and motiongrams.

2. Aims: Creating reduced representations of multidimensional motion capture data of complex music-related body movement that can be used together with spectrograms and motiongrams.

3. Method (if applicable)

4. Results/Main Contribution: We present some of the visualisation techniques we have been developing to display multidimensional data sets: 1) reduction based on collapsing dimensions, 2) reduction based on frame differencing, 3) colour coding of movement features. Examples are given of how these techniques allow for displaying reduced displays of multidimensional motion capture data sets synchronised with spectrograms and motiongrams.

5. Conclusions/Implications: The techniques presented allows for studying relationships between movement and sound in music performance, and make it possible to create visual displays of movement and sound that can be used on screen and in printed documents.},
Address = {Jyv{\"a}skyl{\"a}, Finland},
Author = {Jensenius, Alexander Refsum and Skogstad, St{\aa}le Andreas van Dorp and Nymoen, Kristian and T{\o}rresen, Jim and H{\o}vin, Mats Erling},
Booktitle = {Proceedings of ESCOM 2009: 7th Triennial Conference of the European Society for the Cognitive Sciences of Music},
Date-Added = {2009-08-16 14:45:47 +0300},
Date-Modified = {2011-10-18 19:27:28 +0000},
Title = {Reduced displays of multidimensional motion capture data sets of musical performance},
Year = {2009},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA5Yy5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDA5Yy5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxKfGrXhjAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAxq1cQwAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAwOWMucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADkAYwAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAwOWMucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=}}

@inproceedings{Godoy:2009d,
Address = {Jyv{\"a}skyl{\"a}, Finland},
Author = {God{\o}y, Rolf Inge and Jensenius, Alexander Refsum and Nymoen, Kristian},
Booktitle = {Proceedings of ESCOM 2009: 7th Triennial Conference of the European Society for the Cognitive Sciences of Music},
Date-Added = {2009-08-16 14:44:20 +0300},
Date-Modified = {2010-01-31 15:01:41 +0100},
Title = {Coarticulation of sound and movement in music},
Year = {2009}}

@inproceedings{Godoy:2009b,
Address = {Bielefeld, Germany},
Author = {God{\o}y, Rolf Inge and Jensenius, Alexander Refsum and Nymoen, Kristian},
Booktitle = {Gesture Workshop 25--27 February 2009},
Date-Added = {2009-03-01 13:07:43 +0100},
Date-Modified = {2009-04-27 17:57:22 +0200},
Title = {Chunking by coarticulation in music-related gestures},
Year = {2009},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YW8QHQAuAC4ALwBQAEQARgBzAC8ARwBvAGQA+AB5AC8ARwBvAGQA+AB5AF8AMgAwADAAOQBhAC4AcABkAGbSFwsYGVdOUy5kYXRhTxEBpgAAAAABpgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAACMqDD0dvZL95XzIwMDlhLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALAw7E89ocAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAVHb2S/eQAAEAAIAADKPHEDAAAAEQAIAADE8738AAAAAQAUAAjKgwAG0+oABW3NAAUAyAAAvzEAAgBFTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBHb2S/eToAR29kv3lfMjAwOWEucGRmAAAOACAADwBHAG8AZAD4AHkAXwAyADAAMAA5AGEALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADVVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9Hb2TDuHkvR29kw7h5XzIwMDlhLnBkZgAAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgDLANAA2AKCAoQCiQKUAp0CqwKvArYCvwLEAtEC1ALmAukC7gAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALw}}

@inproceedings{Nymoen:2009,
Abstract = {The paper presents Nymophone2, an acoustic instrument with a complex relationship between performance actions and emergent sound. A method for describing the multidimensional control actions needed to play the instrument is presented and discussed.},
Address = {Pittsburgh, Penn.},
Author = {Nymoen, Kristian and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
Date-Added = {2009-04-14 07:47:26 +0200},
Date-Modified = {2012-08-17 13:32:14 +0000},
Keywords = {Nymophone, Instruments, Mapping, Playing Technique, Sound Taxonomies},
Month = {4--6 June},
Pages = {94--97},
Publisher = {Carnegie-Mellon University},
Title = {A discussion of multidimensional mapping in {N}ymophone2},
Url = {http://urn.nb.no/URN:NBN:no-23430},
Webpdf = {http://urn.nb.no/URN:NBN:no-23430},
Year = {2009},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QHi4uL1BERnMvTnltb2VuL055bW9lbl8yMDA5LnBkZtIXCxgZV05TLmRhdGFPEQGkAAAAAAGkAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAANC3wPTnltb2VuXzIwMDkucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA0LwMYFRXcAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAABk55bW9lbgAQAAgAAMo8cQMAAAARAAgAAMYFKVcAAAABABQADQt8AAbT6gAFbc0ABQDIAAC/MQACAEZNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AE55bW9lbjoATnltb2VuXzIwMDkucGRmAA4AIAAPAE4AeQBtAG8AZQBuAF8AMgAwADAAOQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIANFVzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL055bW9lbi9OeW1vZW5fMjAwOS5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCvALQAvAJkAmYCawJ2An8CjQKRApgCoQKmArMCtgLIAssC0AAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALS},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-23430}}

@inproceedings{Torresen:2009,
Address = {Trondheim, Norway},
Author = {Torresen, Jim and Glette, Kyrre and Jensenius, Alexander Refsum and Furumolmen, Marcus},
Booktitle = {Proceedings of the first Norwegian Artificial Intelligence Symposium},
Date-Added = {2010-09-11 10:56:11 +0200},
Date-Modified = {2011-12-16 13:28:25 +0000},
Editor = {Kofod-Petersen, Anders and Langseth, Helge and Gundersen, Odd Erik},
Month = {November},
Pages = {3--6},
Publisher = {Tapir Akademisk Forlag},
Title = {Robotics and Intelligent Systems at University of Oslo},
Year = {2009},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QIi4uL1BERnMvVG9ycmVzZW4vVG9ycmVzZW5fMjAwOS5wZGbSFwsYGVdOUy5kYXRhTxEBsgAAAAABsgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAAER/7EVRvcnJlc2VuXzIwMDkucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARH/zKr63gAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAhUb3JyZXNlbgAQAAgAAMo8cQMAAAARAAgAAMqvkcAAAAABABQAER/7AAbT6gAFbc0ABQDIAAC/MQACAEpNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AFRvcnJlc2VuOgBUb3JyZXNlbl8yMDA5LnBkZgAOACQAEQBUAG8AcgByAGUAcwBlAG4AXwAyADAAMAA5AC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgA4VXNlcnMvYWxleGFuamUvUmVmZXJlbmNlL1BERnMvVG9ycmVzZW4vVG9ycmVzZW5fMjAwOS5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCzALgAwAJ2AngCfQKIApECnwKjAqoCswK4AsUCyALaAt0C4gAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALk}}

@inproceedings{Jensenius:2010f,
Abstract = {We report on the development of a video based analysis system that controls concatenative sound synthesis and sound spatialisation in realtime in concert performances. The system has been used in several pieces, most recently Transformation for electric violin and live electronics, where the performer controls sound playback through motion on stage.},
Address = {Trondheim},
Annote = {Gj{\o}vik},
Author = {Jensenius, Alexander Refsum and Johnson, Victoria},
Booktitle = {Proceedings of Norwegian Artificial Intelligence Symposium},
Date-Added = {2010-10-23 08:24:04 +0200},
Date-Modified = {2011-12-08 22:00:59 +0000},
Editor = {Yildirim, Sule and Kofod-Petersen, Andersen},
Pages = {85--88},
Publisher = {Tapir Akademisk Forlag},
Title = {A video based analysis system for realtime control of concatenative sound synthesis and spatialisation},
Url = {http://urn.nb.no/URN:NBN:no-28129},
Webpdf = {http://urn.nb.no/URN:NBN:no-28129},
Year = {2010},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEwZC5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDEwZC5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxgDI6EwcAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAyOgv/AAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMGQucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADAAZAAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMGQucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-28129}}

@inproceedings{Glette:2010,
Abstract = {The paper addresses possibilities of extracting information from music-related actions, in the particular case of what we call sound-tracings. These tracings are recordings from a graphics tablet of subjects' drawings associated with a set of short sounds. Although the subjects' associations to sounds are very subjective, and thus the resulting tracings are very different, an attempt is made at extracting some global features which can be used for comparison between tracings. These features are then analyzed and classified with an SVM classifier.},
Address = {Trondheim},
Annote = {Gj{\o}vik},
Author = {Glette, Kyrre and Jensenius, Alexander Refsum and God{\o}y, Rolf Inge},
Booktitle = {Proceedings of Norwegian Artificial Intelligence Symposium},
Date-Added = {2010-10-18 09:14:29 +0200},
Date-Modified = {2011-12-08 22:00:48 +0000},
Editor = {Yildirim, Sule and Kofod-Petersen, Andersen},
Pages = {63--66},
Publisher = {Tapir Akademisk Forlag},
Title = {Extracting Action-Sound Features From a Sound-Tracing Study},
Url = {http://urn.nb.no/URN:NBN:no-28128},
Webpdf = {http://urn.nb.no/URN:NBN:no-28128},
Year = {2010},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QHi4uL1BERnMvR2xldHRlL0dsZXR0ZV8yMDEwLnBkZtIXCxgZV05TLmRhdGFPEQGkAAAAAAGkAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAIyoAPR2xldHRlXzIwMTAucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAjKgcjoTBIAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAABkdsZXR0ZQAQAAgAAMo8cQMAAAARAAgAAMjoL/IAAAABABQACMqAAAbT6gAFbc0ABQDIAAC/MQACAEZNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AEdsZXR0ZToAR2xldHRlXzIwMTAucGRmAA4AIAAPAEcAbABlAHQAdABlAF8AMgAwADEAMAAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIANFVzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0dsZXR0ZS9HbGV0dGVfMjAxMC5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCvALQAvAJkAmYCawJ2An8CjQKRApgCoQKmArMCtgLIAssC0AAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALS},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-28128}}

@inproceedings{Jensenius:2010b,
Abstract = {The paper reports on the development and activities in the recently established fourMs lab (Music, Mind, Motion, Machines) at the University of Oslo, Norway. As a meeting place for researchers in music and informatics, the fourMs lab is centred around studies of basic issues in music cognition, machine learning and robotics.},
Address = {New York},
Annote = {Jensenius, Alexander Refsum, Kyrre Glette, Rolf Inge God{\o}y, Mats H{\o}vin, Kristian Nymoen, St{\aa}le A. Skogstad, Jim Torresen},
Author = {Jensenius, Alexander Refsum and Glette, Kyrre and God{\o}y, Rolf Inge and H{\o}vin, Mats Erling and Nymoen, Kristian and Skogstad, St{\aa}le A. and T{\o}rresen, Jim},
Booktitle = {Proceedings of the 2010 International Computer Music Conference},
Date-Added = {2010-04-28 09:24:05 +0200},
Date-Modified = {2011-12-13 08:02:21 +0000},
Pages = {290--293},
Title = {{fourMs, University of Oslo --- Lab Report}},
Url = {http://urn.nb.no/URN:NBN:no-24840},
Webpdf = {http://urn.nb.no/URN:NBN:no-24840},
Year = {2010},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEwYS5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDEwYS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxP7H7ilwAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAx+4NUAAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMGEucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADAAYQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMGEucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-File-2 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QGC4uLy4uLy5UcmFzaC9kb2QtaWR4LnBkZtIXCxgZV05TLmRhdGFPEQFuAAAAAAFuAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAGZc8LZG9kLWlkeC5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABfICMsMxOQAAAAAAAAAAAACAAIAAAkgAAAAAAAAAAAAAAAAAAAABi5UcmFzaAAQAAgAAMo8cQMAAAARAAgAAMsMttQAAAABAAwABmXPAAUAyAAAvzEAAgAxTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToALlRyYXNoOgBkb2QtaWR4LnBkZgAADgAYAAsAZABvAGQALQBpAGQAeAAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAIVVzZXJzL2FsZXhhbmplLy5UcmFzaC9kb2QtaWR4LnBkZgAAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCpAK4AtgIoAioCLwI6AkMCUQJVAlwCZQJqAncCegKMAo8ClAAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAAKW},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-24840}}

@inproceedings{Skogstad:2010,
Abstract = {The paper presents a conceptual overview of how optical infrared marker based motion capture systems (IrMoCap) can be used in musical interaction. First we present a review of related work of using IrMoCap for musical control. This is followed by a discussion of possible features which can be exploited. Finally, the question of mapping movement features to sound features is presented and discussed.},
Address = {Sydney},
Annote = {editors: Beilharz, Kirsty and Johnston, Andrew and Ferguson, Sam and Chen, Yi-Chun},
Author = {Skogstad, St{\aa}le A. and Jensenius, Alexander Refsum and Nymoen, Kristian},
Booktitle = {Proceedings of the International Conference on New Interfaces For Musical Expression},
Date-Added = {2010-04-28 09:22:08 +0200},
Date-Modified = {2012-08-24 10:36:25 +0000},
Pages = {407--410},
Title = {Using {IR} Optical Marker Based Motion Capture for Exploring Musical Interaction},
Url = {http://urn.nb.no/URN:NBN:no-25844},
Webpdf = {http://urn.nb.no/URN:NBN:no-25844},
Year = {2010},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QIy4uL1BERnMvU2tvZ3N0YWQvU2tvZ3N0YWRfMjAxMGEucGRm0hcLGBlXTlMuZGF0YU8RAbgAAAAAAbgAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMo8jSNIKwAAABEd5RJTa29nc3RhZF8yMDEwYS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAER3qyD0J7AAAAAAAAAAAAAEAAwAACSAAAAAAAAAAAAAAAAAAAAAIU2tvZ3N0YWQAEAAIAADKPHEDAAAAEQAIAADIPO3MAAAAAQAUABEd5QAG0+oABW3NAAUAyAAAvzEAAgBLTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBTa29nc3RhZDoAU2tvZ3N0YWRfMjAxMGEucGRmAAAOACYAEgBTAGsAbwBnAHMAdABhAGQAXwAyADAAMQAwAGEALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADlVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9Ta29nc3RhZC9Ta29nc3RhZF8yMDEwYS5wZGYAABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtAC5AMECfQJ/AoQCjwKYAqYCqgKxAroCvwLMAs8C4QLkAukAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC6w==},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-25844}}

@inproceedings{Nymoen:2010,
Abstract = {In this paper we present a method for studying relationships between features of sound and features of movement. The method has been tested by carrying out an experiment with people moving an object in space along with short sounds. 3D position data of the object was recorded and several features were calculated from each of the recordings. These features were provided as input to a classifier which was able to classify the recorded actions satisfactorily; particularly when taking into account that the only link between the actions performed by the different subjects were the sound they heard while making the action.},
Address = {Sydney},
Annote = {editor: Beilharz, Kirsty and Johnston, Andrew and Ferguson, Sam and Chen, Yi-Chun
publisher: University of Technology Sydney},
Author = {Nymoen, Kristian and Glette, Kyrre and Skogstad, St{\aa}le A. and T{\o}rresen, Jim and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
Date-Added = {2010-04-28 09:21:06 +0200},
Date-Modified = {2012-08-17 13:33:05 +0000},
Pages = {259--262},
Title = {Searching for cross-individual relationships between sound and movement features using an {SVM} classifier},
Url = {http://urn.nb.no/URN:NBN:no-25842},
Webpdf = {http://urn.nb.no/URN:NBN:no-25842},
Year = {2010},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QHS4uLy4uLy5UcmFzaC9OeW1vZW5fMjAxMGEucGRm0hcLGBlXTlMuZGF0YU8RAYAAAAAAAYAAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMjZvt1IKwAAABh5shBOeW1vZW5fMjAxMGEucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHPsuyD0J2QAAAAAAAAAAAAIAAgAACSAAAAAAAAAAAAAAAAAAAAAGLlRyYXNoABAACAAAyNmivQAAABEACAAAyDztuQAAAAEADAAYebIACEwqAACTJQACADZNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgAuVHJhc2g6AE55bW9lbl8yMDEwYS5wZGYADgAiABAATgB5AG0AbwBlAG4AXwAyADAAMQAwAGEALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASACZVc2Vycy9hbGV4YW5qZS8uVHJhc2gvTnltb2VuXzIwMTBhLnBkZgATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOAK4AswC7Aj8CQQJGAlECWgJoAmwCcwJ8AoECjgKRAqMCpgKrAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAq0=},
Bdsk-File-2 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QHy4uL1BERnMvTnltb2VuL055bW9lbl8yMDEwYS5wZGbSFwsYGVdOUy5kYXRhTxEBqgAAAAABqgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADQt8EE55bW9lbl8yMDEwYS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANC8zK30DLAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAZOeW1vZW4AEAAIAADKPHEDAAAAEQAIAADK3zK7AAAAAQAUAA0LfAAG0+oABW3NAAUAyAAAvzEAAgBHTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBOeW1vZW46AE55bW9lbl8yMDEwYS5wZGYAAA4AIgAQAE4AeQBtAG8AZQBuAF8AMgAwADEAMABhAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgA1VXNlcnMvYWxleGFuamUvUmVmZXJlbmNlL1BERnMvTnltb2VuL055bW9lbl8yMDEwYS5wZGYAABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AsAC1AL0CawJtAnICfQKGApQCmAKfAqgCrQK6Ar0CzwLSAtcAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC2Q==},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-25842}}

@inproceedings{Torresen:2010,
Abstract = {This paper presents a comparison of different configurations of a wireless sensor system for capturing human motion. The systems consist of sensor elements which wirelessly transfers motion data to a receiver element. The sensor elements consist of a microcontroller, accelerometer(s) and a radio transceiver. The receiver element consists of a radio receiver connected through a microcontroller to a computer for real time sound synthesis. The wireless transmission between the sensor elements and the receiver element is based on the low rate IEEE 802.15.4/ZigBee standard.

A configuration with several accelerometers connected by wire to a wireless sensor element is compared to using multiple wireless sensor elements with only one accelerometer in each. The study shows that it would be feasible to connect 5-6 accelerometers in the given setups.

Sensor data processing can be done in either the receiver element or in the sensor element. For various reasons it can be reasonable to implement some sensor data processing in the sensor element. The paper also looks at how much time that typically would be needed for a simple pre-processing task.},
Address = {Sydney},
Annote = {editor: Beilharz, Kirsty and Johnston, Andrew and Ferguson, Sam and Chen, Yi-Chun},
Author = {Torresen, Jim and Renton, Eirik and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of New Interfaces for Musical Expression++},
Date-Added = {2010-04-28 09:19:21 +0200},
Date-Modified = {2011-12-08 21:55:46 +0000},
Pages = {368--371},
Title = {Wireless Sensor Data Collection based on {ZigBee} Communication},
Url = {http://urn.nb.no/URN:NBN:no-25843},
Webpdf = {http://urn.nb.no/URN:NBN:no-25843},
Year = {2010},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QIy4uL1BERnMvVG9ycmVzZW4vVG9ycmVzZW5fMjAxMGEucGRm0hcLGBlXTlMuZGF0YU8RAbgAAAAAAbgAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMo8jSNIKwAAABEf+xJUb3JyZXNlbl8yMDEwYS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAESAJyD0J6AAAAAAAAAAAAAEAAwAACSAAAAAAAAAAAAAAAAAAAAAIVG9ycmVzZW4AEAAIAADKPHEDAAAAEQAIAADIPO3IAAAAAQAUABEf+wAG0+oABW3NAAUAyAAAvzEAAgBLTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBUb3JyZXNlbjoAVG9ycmVzZW5fMjAxMGEucGRmAAAOACYAEgBUAG8AcgByAGUAcwBlAG4AXwAyADAAMQAwAGEALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADlVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9Ub3JyZXNlbi9Ub3JyZXNlbl8yMDEwYS5wZGYAABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtAC5AMECfQJ/AoQCjwKYAqYCqgKxAroCvwLMAs8C4QLkAukAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC6w==},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-25843}}

@inproceedings{Johansen:2010,
Author = {Johansen, Espen Borg{\aa} and Nymoen, Kristian and Jensenius, Alexander Refsum and Aase, Heidi and Sagvolden, Terje},
Booktitle = {ADHD nettverkssamling},
Date-Added = {2010-03-10 10:43:53 +0100},
Date-Modified = {2010-03-10 10:44:40 +0100},
Title = {Video analyses of behavior -- A future tool for identifying {ADHD}?},
Year = {2010},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QIi4uL1BERnMvSm9oYW5zZW4vSm9oYW5zZW5fMjAxMC5wZGbSFwsYGVdOUy5kYXRhTxEBsgAAAAABsgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADNekEUpvaGFuc2VuXzIwMTAucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAM16XHvSg2AAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAhKb2hhbnNlbgAQAAgAAMo8cQMAAAARAAgAAMe9GiYAAAABABQADNekAAbT6gAFbc0ABQDIAAC/MQACAEpNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AEpvaGFuc2VuOgBKb2hhbnNlbl8yMDEwLnBkZgAOACQAEQBKAG8AaABhAG4AcwBlAG4AXwAyADAAMQAwAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgA4VXNlcnMvYWxleGFuamUvUmVmZXJlbmNlL1BERnMvSm9oYW5zZW4vSm9oYW5zZW5fMjAxMC5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCzALgAwAJ2AngCfQKIApECnwKjAqoCswK4AsUCyALaAt0C4gAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALk}}

@inproceedings{Jensenius:2010a,
Abstract = {We report on a study of perceptual and acoustic features related to the placement of microphones around a custom made glass instrument. Different microphone setups were tested: above, inside and outside the instrument and at different distances. The sounds were evaluated by an expert performer, and further qualitative and quantitative analyses have been carried out. Preference was given to the recordings from microphones placed close to the rim of the instrument, either from the inside or the outside.},
Address = {Sydney},
Annote = {Jensenius, Alexander Refsum and Innervik, Kjell Tore and Frounberg, Ivar

Beilharz, Kirsty and Johnston, Andrew and Ferguson, Sam and Chen, Yi-Chun},
Author = {Jensenius, Alexander Refsum and Innervik, Kjell Tore and Frounberg, Ivar},
Booktitle = {Proceedings of the International Conference on New Interfaces For Musical Expression},
Date-Added = {2010-02-05 13:32:36 +0100},
Date-Modified = {2013-01-22 08:26:49 +0000},
Pages = {208--211},
Title = {Evaluating the Subjective Effects of Microphone Placement on Glass Instruments},
Url = {http://urn.nb.no/URN:NBN:no-26164},
Webpdf = {http://urn.nb.no/URN:NBN:no-26164},
Year = {2010},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEwYi5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDEwYi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxTnIPQn7AAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAyDzt2wAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMGIucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADAAYgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMGIucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-26164}}

@inproceedings{Frounberg:2010,
Abstract = {The paper reports on the development of prototypes of glass instruments. The focus has been on developing acoustic instruments specifically designed for electronic treatment, and where timbral qualities have had priority over pitch. The paper starts with a brief historical overview of glass instruments and their artistic use. Then follows an overview of the glass blowing process. Finally the musical use of the instruments is discussed.},
Address = {Sydney},
Annote = {Frounberg, Ivar and Innervik, Kjell Tore and Jensenius, Alexander Refsum},
Author = {Frounberg, Ivar and Jensenius, Alexander Refsum and Innervik, Kjell Tore},
Booktitle = {Proceedings of the International Conference on New Interfaces For Musical Expression},
Date-Added = {2010-01-25 10:37:30 +0100},
Date-Modified = {2013-01-22 08:26:35 +0000},
Editor = {Beilharz, Kirsty and Johnston, Andrew and Ferguson, Sam and Chen, Yi-Chun},
Pages = {287--290},
Title = {Glass instruments --- From Pitch to Timbre},
Url = {http://urn.nb.no/URN:NBN:no-26165},
Year = {2010},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvRnJvdW5iZXJnL0Zyb3VuYmVyZ18yMDEwYS5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAAB4oFE0Zyb3VuYmVyZ18yMDEwYS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHij3IPQnjAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlGcm91bmJlcmcAABAACAAAyjxxAwAAABEACAAAyDztwwAAAAEAFAAHigUABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoARnJvdW5iZXJnOgBGcm91bmJlcmdfMjAxMGEucGRmAAAOACgAEwBGAHIAbwB1AG4AYgBlAHIAZwBfADIAMAAxADAAYQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0Zyb3VuYmVyZy9Gcm91bmJlcmdfMjAxMGEucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-26165}}

@inproceedings{Nymoen:2011b,
Abstract = {The paper presents the SoundSaber a musical instrument based on motion capture technology. We present technical details of the instrument and discuss the design development process. The SoundSaber may be used as an example of how high-fidelity motion capture equipment can be used for prototyping musical instruments, and we illustrate this with an example of a low-cost implementation of our motion capture instrument.},
Address = {Oslo},
Author = {Nymoen, Kristian and Skogstad, St{\aa}le A. and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
Date-Added = {2011-10-03 14:23:15 +0000},
Date-Modified = {2012-10-15 10:56:26 +0000},
Issn = {2220-4806},
Keywords = {NIME 2011},
Pages = {312--315},
Title = {{SoundSaber} --- A Motion Capture Instrument},
Url = {http://urn.nb.no/URN:NBN:no-29584},
Webpdf = {http://urn.nb.no/URN:NBN:no-29584},
Year = {2011},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QHy4uL1BERnMvTnltb2VuL055bW9lbl8yMDExZC5wZGbSFwsYGVdOUy5kYXRhTxEBqgAAAAABqgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADQt8EE55bW9lbl8yMDExZC5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYfe7LD1n2AAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAZOeW1vZW4AEAAIAADKPHEDAAAAEQAIAADLD0vmAAAAAQAUAA0LfAAG0+oABW3NAAUAyAAAvzEAAgBHTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBOeW1vZW46AE55bW9lbl8yMDExZC5wZGYAAA4AIgAQAE4AeQBtAG8AZQBuAF8AMgAwADEAMQBkAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgA1VXNlcnMvYWxleGFuamUvUmVmZXJlbmNlL1BERnMvTnltb2VuL055bW9lbl8yMDExZC5wZGYAABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AsAC1AL0CawJtAnICfQKGApQCmAKfAqgCrQK6Ar0CzwLSAtcAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC2Q==},
Bdsk-Url-1 = {http://www.nime2011.org/proceedings/papers/G26-Nymoen.pdf},
Bdsk-Url-2 = {http://urn.nb.no/URN:NBN:no-29584}}

@inproceedings{Skogstad:2011,
Abstract = {The paper presents research about implementing a full body inertial motion capture system, the Xsens MVN suit, for musical interaction. Three different approaches for streaming real time and prerecorded motion capture data with Open Sound Control have been implemented. Furthermore, we present technical performance details and our experience with the motion capture system in realistic practice.},
Address = {Oslo, Norway},
Author = {Skogstad, St{\aa}le A. and Nymoen, Kristian and De Quay, Yago and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
Date-Added = {2011-09-27 18:35:34 +0000},
Date-Modified = {2012-04-03 14:20:17 +0000},
Doi = {http://urn.nb.no/URN:NBN:no-29815},
Issn = {2220-4806},
Pages = {300--303},
Title = {{OSC} Implementation and Evaluation of the {X}sens {MVN} suit},
Url = {http://www.nime2011.org/proceedings/papers/G23-Skogstad.pdf},
Webpdf = {http://www.nime2011.org/proceedings/papers/G23-Skogstad.pdf},
Year = {2011},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QIy4uL1BERnMvU2tvZ3N0YWQvU2tvZ3N0YWRfMjAxMWEucGRm0hcLGBlXTlMuZGF0YU8RAbgAAAAAAbgAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMo8jSNIKwAAABEd5RJTa29nc3RhZF8yMDExYS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGHVGyw65pgAAAAAAAAAAAAEAAwAACSAAAAAAAAAAAAAAAAAAAAAIU2tvZ3N0YWQAEAAIAADKPHEDAAAAEQAIAADLDquWAAAAAQAUABEd5QAG0+oABW3NAAUAyAAAvzEAAgBLTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBTa29nc3RhZDoAU2tvZ3N0YWRfMjAxMWEucGRmAAAOACYAEgBTAGsAbwBnAHMAdABhAGQAXwAyADAAMQAxAGEALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADlVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9Ta29nc3RhZC9Ta29nc3RhZF8yMDExYS5wZGYAABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtAC5AMECfQJ/AoQCjwKYAqYCqgKxAroCvwLMAs8C4QLkAukAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC6w==},
Bdsk-Url-1 = {http://www.nime2011.org/proceedings/papers/G23-Skogstad.pdf},
Bdsk-Url-2 = {http://urn.nb.no/URN:NBN:no-29815}}

@inproceedings{Schoonderwaldt:2011,
Abstract = {We report on a performance study of a French-Canadian fiddler. The fiddling tradition forms an interesting contrast to classical violin performance in several ways. Distinguishing features include special elements in the bowing technique and the presence of an accompanying foot clogging pattern. These two characteristics are described, visualized and analyzed using video and motion capture recordings as source material.},
Address = {Oslo},
Author = {Schoonderwaldt, Erwin and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
Date-Added = {2011-09-27 18:28:43 +0000},
Date-Modified = {2012-09-10 11:14:10 +0000},
Issn = {2220-4806},
Keywords = {fiddler, violin, French-Canadian, bowing, feet, clogging, motion capture, video, motiongram, kinematics, sonification},
Pages = {256--259},
Title = {Effective and expressive movements in a {French-Canadian} fiddler's performance},
Url = {http://urn.nb.no/URN:NBN:no-29437},
Webpdf = {http://urn.nb.no/URN:NBN:no-29437},
Year = {2011},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QLi4uL1BERnMvU2Nob29uZGVyd2FsZHQvU2Nob29uZGVyd2FsZHRfMjAxMS5wZGbSFwsYGVdOUy5kYXRhTxEB3AAAAAAB3AACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADTnPF1NjaG9vbmRlcndhbGR0XzIwMTEucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANOeDKp+NWAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAA5TY2hvb25kZXJ3YWxkdAAQAAgAAMo8cQMAAAARAAgAAMqnxzYAAAABABQADTnPAAbT6gAFbc0ABQDIAAC/MQACAFZNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AFNjaG9vbmRlcndhbGR0OgBTY2hvb25kZXJ3YWxkdF8yMDExLnBkZgAOADAAFwBTAGMAaABvAG8AbgBkAGUAcgB3AGEAbABkAHQAXwAyADAAMQAxAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgBEVXNlcnMvYWxleGFuamUvUmVmZXJlbmNlL1BERnMvU2Nob29uZGVyd2FsZHQvU2Nob29uZGVyd2FsZHRfMjAxMS5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgC/AMQAzAKsAq4CswK+AscC1QLZAuAC6QLuAvsC/gMQAxMDGAAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAAMa},
Bdsk-Url-1 = {http://www.nime2011.org/proceedings/papers/G12-Schoonderwaldt.pdf},
Bdsk-Url-2 = {http://urn.nb.no/URN:NBN:no-29437}}

@inproceedings{Jensenius:2011,
Abstract = {We present the results of a pilot study on how micromovements may be used in an interactive dance/music performance. Micromovements are subtle body movements that cannot be easily seen by the human eye. Using an infrared marker-based motion capture system we have explored micromovements through 15×10 minutes long observation studies of ourselves standing still or moving slowly. The results from these studies show that there are both consistent and inconsistent movement patterns to be found at various temporal levels. Experimentation with three different types of realtime sonification shows artistic potential in using micromovements in dance and music performance.},
Address = {Esbjerg, Denmark},
Annote = {Proceedings of Second International ICST Conference on Arts and Technology},
Author = {Jensenius, Alexander Refsum and Bjerkestrand, Kari Anne Vedsteinsvik},
Booktitle = {Proceedings of ICST Conference on Arts and Technology},
Date-Added = {2011-09-15 08:26:58 +0000},
Date-Modified = {2012-06-05 14:22:52 +0000},
Isbn = {978-1-936968-38-1},
Title = {Exploring micromovements with motion capture and sonification},
Url = {http://www.duo.uio.no/sok/work.html?WORKID=165863&fid=99133},
Year = {2011},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDExYi5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDExYi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVPtvK0J/gAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAytCDwAAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMWIucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADEAYgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMWIucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://www.duo.uio.no/sok/work.html?WORKID=165863&fid=99133}}

@inproceedings{Nymoen:2011,
Abstract = {Simultaneous handling and synchronisation of data related to music, such as score annotations, MIDI, video, motion descriptors, sensor data, etc. requires special tools due to the diversity of the data. We present a toolbox for recording and playback of complex music-related data. Using the Sound Description Interchange Format as a storage format and the Open Sound Control protocol as a streaming protocol simplifies exchange of data between composers and researchers.},
Address = {Padova, Italy},
Author = {Nymoen, Kristian and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the Sound and Music Computing Conference},
Date-Added = {2011-05-06 11:36:18 +0200},
Date-Modified = {2012-08-17 13:35:11 +0000},
Isbn = {9788897385035},
Pages = {427--430},
Publisher = {Padova University Press},
Title = {A toolbox for storing and streaming music-related data},
Url = {http://urn.nb.no/URN:NBN:no-28510},
Webpdf = {http://urn.nb.no/URN:NBN:no-28510},
Year = {2011},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QHi4uL1BERnMvTnltb2VuL055bW9lbl8yMDExLnBkZtIXCxgZV05TLmRhdGFPEQGkAAAAAAGkAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAANC3wPTnltb2VuXzIwMTEucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA0L0spvK5oAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAABk55bW9lbgAQAAgAAMo8cQMAAAARAAgAAMpvD3oAAAABABQADQt8AAbT6gAFbc0ABQDIAAC/MQACAEZNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AE55bW9lbjoATnltb2VuXzIwMTEucGRmAA4AIAAPAE4AeQBtAG8AZQBuAF8AMgAwADEAMQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIANFVzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL055bW9lbi9OeW1vZW5fMjAxMS5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCvALQAvAJkAmYCawJ2An8CjQKRApgCoQKmArMCtgLIAssC0AAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALS},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-28510}}

@inproceedings{Chandra:2012,
Abstract = {The paper presents the interactive music system SoloJam, which allows a group of participants with little or no musical training to effectively play together in a “band-like” setting. It allows the participants to take turns playing solos made up of rhythmic pattern sequences. We specify the issue at hand for allowing such participation as being the requirement of decentralised coherent circulation of playing solos. This is to be realised by some form of intelligence within the devices used for participation. Here we take inspiration from the Economic Sciences, and propose this intelligence to take the form of making devices possessing the capability of evaluating their utility of playing the next solo, the capability of holding auctions, and of bidding within them. We show that holding auctions and bidding within them enables decentralisation of co-ordinating solo circulation, and a properly designed utility function enables coherence in the musical output. The approach helps achieve decentralised coherent circulation with artificial agents simulating human participants. The effectiveness of the approach is further supported when human users participate. As a result, the approach is shown to be effective at enabling participants with little or no musical training to play together in SoloJam.},
Address = {London},
Annote = {http://cmmr2012.eecs.qmul.ac.uk/sites/cmmr2012.eecs.qmul.ac.uk/files/pdf/papers/cmmr2012_submission_64.pdf},
Author = {Chandra, Arjun and Nymoen, Kristian and Voldsund, Arve and Jensenius, Alexander Refsum and Glette, Kyrre and T{\o}rresen, Jim},
Booktitle = {Proceedings of the 9th International Symposium on Computer Music Modelling and Retrieval},
Date-Added = {2012-07-05 10:58:22 +0000},
Date-Modified = {2012-08-17 13:28:02 +0000},
Pages = {674--689},
Title = {Enabling Participants to Play Rhythmic Solos Within a Group via Auctions},
Url = {http://www.duo.uio.no/sok/work.html?WORKID=167891&fid=101016},
Year = {2012},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QIC4uL1BERnMvQ2hhbmRyYS9DaGFuZHJhXzIwMTIucGRm0hcLGBlXTlMuZGF0YU8RAawAAAAAAawAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMo8jSNIKwAAAKai1BBDaGFuZHJhXzIwMTIucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAApqK2zBtAiwAAAAAAAAAAAAEAAwAACSAAAAAAAAAAAAAAAAAAAAAHQ2hhbmRyYQAAEAAIAADKPHEDAAAAEQAIAADMGyRrAAAAAQAUAKai1AAG0+oABW3NAAUAyAAAvzEAAgBITWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBDaGFuZHJhOgBDaGFuZHJhXzIwMTIucGRmAA4AIgAQAEMAaABhAG4AZAByAGEAXwAyADAAMQAyAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgA2VXNlcnMvYWxleGFuamUvUmVmZXJlbmNlL1BERnMvQ2hhbmRyYS9DaGFuZHJhXzIwMTIucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AsQC2AL4CbgJwAnUCgAKJApcCmwKiAqsCsAK9AsAC0gLVAtoAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC3A==},
Bdsk-Url-1 = {http://cmmr2012.eecs.qmul.ac.uk/sites/cmmr2012.eecs.qmul.ac.uk/files/pdf/papers/cmmr2012_submission_64.pdf},
Bdsk-Url-2 = {http://www.duo.uio.no/sok/work.html?WORKID=167891&fid=101016}}

@inproceedings{Jensenius:2012h,
Abstract = {Motiongrams are visual representations of human motion, generated from regular video recordings. This paper evaluates how different video features may influence the generated motiongram: inversion, colour, filtering, background, lighting, clothing, video size and compression. It is argued that the proposed motiongram implementation is capable of visualising the main motion features even with quite drastic changes in all of the above mentioned variables.},
Address = {Copenhagen},
Author = {Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the 9th Sound and Music Computing Conference},
Date-Added = {2012-05-30 10:11:07 +0000},
Date-Modified = {2012-08-21 06:06:15 +0000},
Pages = {467--472},
Title = {Evaluating How Different Video Features Influence the Visual Quality of Resultant Motiongrams},
Url = {http://www.duo.uio.no/sok/work.html?WORKID=167468&fid=100543},
Year = {2012},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEyYi5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDEyYi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACKhJDLm8foAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAy5uryAAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMmIucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADIAYgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMmIucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-File-2 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEyZC5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDEyZC5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACr73vMJJJkUERGIENBUk8AAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAzCR2RAAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMmQucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADIAZAAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMmQucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://www.duo.uio.no/sok/work.html?WORKID=167468&fid=100543}}

@inproceedings{Jensenius:2012i,
Abstract = {With musical applications in mind, this paper reports on the level of noise observed in two commercial infrared marker-based motion capture systems: one high-end (Qualisys) and one affordable (OptiTrack). We have tested how various features (calibration volume, marker size, sampling frequency, etc.) influence the noise level of markers lying still, and fixed to subjects standing still. The conclusion is that the motion observed in humans standing still is usually considerably higher than the noise level of the systems. Dependent on the system and its calibration, however, the signal-to-noise-ratio may in some cases be problematic.},
Address = {Copenhagen},
Author = {Jensenius, Alexander Refsum and Nymoen, Kristian and Skogstad, St{\aa}le A. and Voldsund, Arve},
Booktitle = {Proceedings of the 9th Sound and Music Computing Conference},
Date-Added = {2012-05-30 10:09:38 +0000},
Date-Modified = {2012-08-24 10:30:02 +0000},
Pages = {258--263},
Title = {A Study of the Noise-Level in Two Infrared Marker-Based Motion Capture Systems},
Url = {http://www.duo.uio.no/sok/work.html?WORKID=167469&fid=100544},
Year = {2012},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEyYy5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDEyYy5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACKhKrLnyR4AAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAy58IWAAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMmMucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADIAYwAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMmMucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-File-2 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEyZS5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDEyZS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACr717MJJIhUERGIENBUk8AAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAzCR2AQAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMmUucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADIAZQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMmUucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://www.duo.uio.no/sok/work.html?WORKID=167469&fid=100544}}

@inproceedings{Godoy:2012,
Abstract = {Our research on music-related actions is based on the conviction that sensations of both sound and body motion are inseparable in the production and perception of music. The expression “music-related actions” is here used to refer to chunks of combined sound and body motion, typically in the duration range of approximately 0.5 to 5 seconds. We believe that chunk-level music-related actions are highly significant for the experience of music, and we are presently working on establishing a database of music-related actions in order to facilitate access to, and research on, our fast growing collection of motion capture data and related material. In this work, we are confronted with a number of perceptual, conceptual and technological issues regarding classification of music-related actions, issues that will be presented and discussed in this paper.},
Address = {Thessaloniki, Greece},
Annote = {
Rolf Inge God{\o}y*1, Jensenius, Alexander Refsum*2, Arve Voldsund*3, Kyrre Glette#4, Mats H{\o}vin#5, Kristian Nymoen#6, St{\aa}le Skogstad#7, Jim T{\o}rresen#8},
Author = {God{\o}y, Rolf Inge and Jensenius, Alexander Refsum and Voldsund, Arve and Glette, Kyrre and H{\o}vin, Mats Erling and Nymoen, Kristian and Skogstad, St{\aa}le A. and T{\o}rresen, Jim},
Booktitle = {Proceedings of 12th International Conference on Music Perception and Cognition},
Date-Added = {2012-05-22 13:23:56 +0000},
Date-Modified = {2012-05-22 13:26:43 +0000},
Title = {Classifying Music-Related Actions},
Year = {2012},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YW8QHAAuAC4ALwBQAEQARgBzAC8ARwBvAGQA+AB5AC8ARwBvAGQA+AB5AF8AMgAwADEAMgAuAHAAZABm0hcLGBlXTlMuZGF0YU8RAaAAAAAAAaAAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMo8jSNIKwAAAAjKgw5Hb2S/eV8yMDEyLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAg4b5y9/0HQAAAAAAAAAAAAEAAwAACSAAAAAAAAAAAAAAAAAAAAAFR29kv3kAABAACAAAyjxxAwAAABEACAAAy9/X/QAAAAEAFAAIyoMABtPqAAVtzQAFAMgAAL8xAAIARE1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoAR29kv3k6AEdvZL95XzIwMTIucGRmAA4AHgAOAEcAbwBkAPgAeQBfADIAMAAxADIALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADRVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9Hb2TDuHkvR29kw7h5XzIwMTIucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AyQDOANYCegJ8AoECjAKVAqMCpwKuArcCvALJAswC3gLhAuYAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC6A==},
Bdsk-File-2 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YW8QHQAuAC4ALwBQAEQARgBzAC8ARwBvAGQA+AB5AC8ARwBvAGQA+AB5AF8AMgAwADEAMgBhAC4AcABkAGbSFwsYGVdOUy5kYXRhTxEBpgAAAAABpgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAACMqDD0dvZL95XzIwMTJhLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADUSozMP+v+AAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAVHb2S/eQAAEAAIAADKPHEDAAAAEQAIAADMP8/eAAAAAQAUAAjKgwAG0+oABW3NAAUAyAAAvzEAAgBFTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBHb2S/eToAR29kv3lfMjAxMmEucGRmAAAOACAADwBHAG8AZAD4AHkAXwAyADAAMQAyAGEALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADVVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9Hb2TDuHkvR29kw7h5XzIwMTJhLnBkZgAAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgDLANAA2AKCAoQCiQKUAp0CqwKvArYCvwLEAtEC1ALmAukC7gAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALw}}

@inproceedings{Torresen:2012,
Abstract = {We present a new wireless transceiver board for the CUI32 sensor interface, aimed at creating a solution that is flexible, reliable, and with little power consumption. Communication with the board is based on the ZigFlea protocol and it has been evaluated on a CUI32 using the StickOS oper- ating system. Experiments show that the total sensor data collection time is linearly increasing with the number of sensor samples used. A data rate of 0.8 kbit/s is achieved for wirelessly transmitting three axes of a 3D accelerometer. Although this data rate is low compared to other systems, our solution benefits from ease-of-use and stability, and is useful for applications that are not time-critical.},
Address = {Ann Arbor, Michigan},
Author = {T{\o}rresen, Jim and Hauback, {\O}yvind Nyborg and Overholt, Dan and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
Date-Added = {2012-05-21 02:07:42 +0000},
Date-Modified = {2012-06-24 15:58:58 +0000},
Pages = {409--412},
Title = {Development and Evaluation of a {ZigFlea}-based Wireless Transceiver Board for {CUI32}},
Url = {http://www.duo.uio.no/sok/work.html?WORKID=166399&fid=99533},
Year = {2012},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QIi4uL1BERnMvVG9ycmVzZW4vVG9ycmVzZW5fMjAxMi5wZGbSFwsYGVdOUy5kYXRhTxEBsgAAAAABsgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAAER/7EVRvcnJlc2VuXzIwMTIucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACDT/7LweBMAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAhUb3JyZXNlbgAQAAgAAMo8cQMAAAARAAgAAMvBxCwAAAABABQAER/7AAbT6gAFbc0ABQDIAAC/MQACAEpNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AFRvcnJlc2VuOgBUb3JyZXNlbl8yMDEyLnBkZgAOACQAEQBUAG8AcgByAGUAcwBlAG4AXwAyADAAMQAyAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgA4VXNlcnMvYWxleGFuamUvUmVmZXJlbmNlL1BERnMvVG9ycmVzZW4vVG9ycmVzZW5fMjAxMi5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCzALgAwAJ2AngCfQKIApECnwKjAqoCswK4AsUCyALaAt0C4gAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALk},
Bdsk-Url-1 = {http://www.duo.uio.no/sok/work.html?WORKID=166399&fid=99533}}

@inproceedings{Skogstad:2012,
Abstract = {In this paper we present the Dance Jockey System, a system developed for using a full body inertial motion capture suit (Xsens MVN) in music/dance performances. We present different strategies for extracting relevant postures and actions from the continuous data, and how these postures and actions can be used to control sonic and musical features. The system has been used in several public performances, and we believe it has great potential for further exploration. However, to overcome the current practical and technical challenges when working with the system, it is important to further refine tools and software in order to facilitate making of new performance pieces.},
Address = {Ann Arbor, Michigan},
Author = {Skogstad, St{\aa}le A. and Nymoen, Kristian and de Quay, Yago and Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
Date-Added = {2012-05-21 01:56:56 +0000},
Date-Modified = {2012-07-03 06:02:26 +0000},
Pages = {226--229},
Title = {Developing the {Dance Jockey} System for Musical Interaction with the {Xsens} {MVN} Suit},
Url = {http://www.duo.uio.no/sok/work.html?WORKID=167131&fid=100300},
Year = {2012},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QIi4uL1BERnMvU2tvZ3N0YWQvU2tvZ3N0YWRfMjAxMi5wZGbSFwsYGVdOUy5kYXRhTxEBsgAAAAABsgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAAER3lEVNrb2dzdGFkXzIwMTIucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACDT6nLweBIAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAhTa29nc3RhZAAQAAgAAMo8cQMAAAARAAgAAMvBxCgAAAABABQAER3lAAbT6gAFbc0ABQDIAAC/MQACAEpNYWNpbnRvc2ggSEQ6VXNlcnM6AGFsZXhhbmplOgBSZWZlcmVuY2U6AFBERnM6AFNrb2dzdGFkOgBTa29nc3RhZF8yMDEyLnBkZgAOACQAEQBTAGsAbwBnAHMAdABhAGQAXwAyADAAMQAyAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgA4VXNlcnMvYWxleGFuamUvUmVmZXJlbmNlL1BERnMvU2tvZ3N0YWQvU2tvZ3N0YWRfMjAxMi5wZGYAEwABLwAAFQACAA///wAAgAbSGxwdHlokY2xhc3NuYW1lWCRjbGFzc2VzXU5TTXV0YWJsZURhdGGjHR8gVk5TRGF0YVhOU09iamVjdNIbHCIjXE5TRGljdGlvbmFyeaIiIF8QD05TS2V5ZWRBcmNoaXZlctEmJ1Ryb290gAEACAARABoAIwAtADIANwBAAEYATQBVAGAAZwBqAGwAbgBxAHMAdQB3AIQAjgCzALgAwAJ2AngCfQKIApECnwKjAqoCswK4AsUCyALaAt0C4gAAAAAAAAIBAAAAAAAAACgAAAAAAAAAAAAAAAAAAALk},
Bdsk-Url-1 = {http://www.duo.uio.no/sok/work.html?WORKID=167131&fid=100300}}

@inproceedings{Nymoen:2012a,
Abstract = {The paper presents an analysis of the quality of motion data from an iPod Touch (4th gen.). Acceleration and orientation data derived from internal sensors of an iPod is compared to data from a high end optical infrared marker-based motion capture system (Qualisys) in terms of latency, jitter, accuracy and precision. We identify some rotational drift in the iPod, and some time lag between the two systems. Still, the iPod motion data is quite reliable, especially for describing relative motion over a short period of time.},
Address = {Ann Arbor, Michigan},
Author = {Nymoen, Kristian and Voldsund, Arve and Skogstad, St{\aa}le Andreas van Dorp and Jensenius, Alexander Refsum and T{\o}rresen, Jim},
Booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
Date-Added = {2012-03-27 07:17:05 +0000},
Date-Modified = {2012-06-07 07:42:45 +0000},
Pages = {88--91},
Title = {Comparing Motion Data from an {iPod} Touch to a High-End Optical Infrared Marker-Based Motion Capture System},
Url = {http://www.duo.uio.no/sok/work.html?WORKID=165865&fid=99230},
Year = {2012},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QHy4uL1BERnMvTnltb2VuL055bW9lbl8yMDEyYS5wZGbSFwsYGVdOUy5kYXRhTxEBqgAAAAABqgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADQt8EE55bW9lbl8yMDEyYS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACDT5vLweBKAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAZOeW1vZW4AEAAIAADKPHEDAAAAEQAIAADLwcQqAAAAAQAUAA0LfAAG0+oABW3NAAUAyAAAvzEAAgBHTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBOeW1vZW46AE55bW9lbl8yMDEyYS5wZGYAAA4AIgAQAE4AeQBtAG8AZQBuAF8AMgAwADEAMgBhAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgA1VXNlcnMvYWxleGFuamUvUmVmZXJlbmNlL1BERnMvTnltb2VuL055bW9lbl8yMDEyYS5wZGYAABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AsAC1AL0CawJtAnICfQKGApQCmAKfAqgCrQK6Ar0CzwLSAtcAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC2Q==},
Bdsk-Url-1 = {http://www.duo.uio.no/sok/work.html?WORKID=165865&fid=99230}}

@inproceedings{Jensenius:2012e,
Abstract = {We report on the Music Ball Project, a longterm, exploratory project focused on creating novel instruments/controllers with a spherical shape as the common denominator. Besides a simple and attractive geometrical shape, balls afford many different types of use, including play. This has made our music balls popular among widely different groups of people, from toddlers to seniors, including those that would not otherwise engage with a musical instrument. The paper summarises our experience of designing, constructing and using a number of music balls of various sizes and with different types of sound-producing elements.},
Address = {Ann Arbor, Michigan},
Author = {Jensenius, Alexander Refsum and Voldsund, Arve},
Booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
Date-Added = {2012-02-07 09:08:56 +0000},
Date-Modified = {2012-06-24 15:58:31 +0000},
Pages = {300--303},
Title = {The Music Ball Project: Concept, Design, Development, Performance},
Url = {http://www.duo.uio.no/sok/work.html?WORKID=166398&fid=99532},
Year = {2012},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEyYS5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDEyYS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACDUD/LFgPxAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAyxX14QAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMmEucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADIAYQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMmEucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://www.duo.uio.no/sok/work.html?WORKID=166398&fid=99532}}

@inproceedings{Jensenius:2012d,
Abstract = {The paper presents a method for sonification of human body motion based on motiongrams. Motiongrams show the spatiotemporal development of body motion by plotting average matrices of motion images over time. The resultant visual representation resembles spectrograms, and is treated as such by the new sonifyer module for Jamoma for Max, which turns motiongrams into sound by reading a part of the matrix and passing it on to an oscillator bank. The method is surprisingly simple, and has proven to be useful for analytical applications and in interactive music systems.},
Address = {Valencia},
Annote = {Best paper award},
Author = {Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the Fifth International Conference on Advances in Computer-Human Interactions},
Date-Added = {2011-09-15 08:17:26 +0000},
Date-Modified = {2012-12-06 10:26:30 +0000},
Isbn = {978-1-61208-177-9},
Pages = {170--175},
Title = {Motion-sound Interaction Using Sonification based on Motiongrams},
Url = {http://urn.nb.no/URN:NBN:no-30588},
Year = {2012},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJC4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEyLnBkZtIXCxgZV05TLmRhdGFPEQG6AAAAAAG6AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAMwroSSmVuc2VuaXVzXzIwMTIucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACnjCctGHTIAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAACUplbnNlbml1cwAAEAAIAADKPHEDAAAAEQAIAADLRg8iAAAAAQAUAAzCugAG0+oABW3NAAUAyAAAvzEAAgBMTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBKZW5zZW5pdXM6AEplbnNlbml1c18yMDEyLnBkZgAOACYAEgBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADIALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADpVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9KZW5zZW5pdXMvSmVuc2VuaXVzXzIwMTIucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtQC6AMICgAKCAocCkgKbAqkCrQK0Ar0CwgLPAtIC5ALnAuwAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC7g==},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-30588}}

@inproceedings{Jensenius:2013e,
Abstract = {The paper presents the Kinectofon, an instrument for creating sounds through free-hand interaction in a 3D space. The instrument is based on the RGB and depth image streams retrieved from a Microsoft Kinect sensor device. These two image streams are used to create different types of motiongrams, which, again, are used as the source material for a sonification process based on inverse FFT. The instrument is intuitive to play, allowing the performer to create sound by ``touching'' a virtual sound wall.},
Address = {Daejeon, Korea},
Author = {Jensenius, Alexander Refsum},
Booktitle = {Proceedings of the International Conference on New Interfaces For Musical Expression},
Date-Added = {2013-05-18 19:29:05 +0000},
Date-Modified = {2013-05-18 19:40:21 +0000},
Title = {Kinectofon: Performing with Shapes in Planes},
Year = {2013}}

@inproceedings{Bevilacqua:2013,
Abstract = {This SIG intends to investigate the ongoing dialogue between music technology and the field of human-computer interaction. Our specific aims are to consider major findings of musical interface research over recent years and discuss how these might best be conveyed to CHI researchers interested but not yet active in this area, as well as to consider how to stimulate future collaborations between music technology and CHI research communities.},
Address = {Paris},
Author = {Bevilacqua, Fr{\'e}d{\'e}ric and Fels, Sidney S. and Jensenius, Alexander Refsum and Lyons, Michael and Schnell, Norbert and Tanaka, Atau},
Booktitle = {CHI 2013},
Date-Added = {2013-05-01 06:12:20 +0000},
Date-Modified = {2013-05-02 19:21:15 +0000},
Pages = {2529--2532},
Title = {SIG NIME: Music, Technology, and Human-Computer Interaction},
Url = {http://dl.acm.org/citation.cfm?id=2468356.2468822&coll=DL&dl=GUIDE&CFID=212477371&CFTOKEN=33106905},
Year = {2013},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJy4uL1BERnMvQmV2aWxhY3F1YS9CZXZpbGFjcXVhXzIwMTNhLnBkZtIXCxgZV05TLmRhdGFPEQHGAAAAAAHGAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAG2KYUQmV2aWxhY3F1YV8yMDEzYS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf5Ots2nMIMAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAACkJldmlsYWNxdWEAEAAIAADKPHEDAAAAEQAIAADNpxRjAAAAAQAUAAbYpgAG0+oABW3NAAUAyAAAvzEAAgBPTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBCZXZpbGFjcXVhOgBCZXZpbGFjcXVhXzIwMTNhLnBkZgAADgAqABQAQgBlAHYAaQBsAGEAYwBxAHUAYQBfADIAMAAxADMAYQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAPVVzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0JldmlsYWNxdWEvQmV2aWxhY3F1YV8yMDEzYS5wZGYAABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AuAC9AMUCjwKRApYCoQKqArgCvALDAswC0QLeAuEC8wL2AvsAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC/Q==}}

@inproceedings{Jensenius:2013f,
Address = {Stockholm},
Author = {Jensenius, Alexander Refsum},
Booktitle = {Proceedings of Sound and Music Computing},
Date-Added = {2013-05-18 19:40:51 +0000},
Date-Modified = {2013-05-18 19:41:35 +0000},
Note = {In review},
Title = {Non-Realtime Sonification of Motiongrams},
Year = {2013}}

@mastersthesis{Jensenius:2002,
Abstract = {This project started with the observation that we manage to recognize a song by listening to only a second of it. What perceptual and musical features make this possible, and can such features be used in music analysis and music information retrieval? These questions can be broken down to two main problems: a) segregation of sensory input and b) recognition of musical features. The segregation of musical information from a complex soundscape is discussed with reference to theories of auditory scene analysis and music perception. A problem is that there is still no good way to make computers separate sound streams in a way similar to human perception. When it comes to the recognition process, the thesis focuses on what musical features make a song more or less recognizable. It is argued that a song is recognized quicker if there is some salient, or perceptually significant, feature present. Then it is shown how salience points can be analysed with reference to traditional musical parameters such as melody, harmony, rhythm and dynamics. This discussion leads to an acknowledgment of the significance of sound in music perception. Next, different methods of analysing, visualizing and synthesizing sound, or more specifically instrument timbre, is shown. Finally, theories of artificial neural networks are outlined, with an example of training a feedforward network with timbre. The success of this simulation is taken as an indication that connectionist models may resemble human perception. Throughout the thesis, several examples are shown of how the graphical programming environment MAX/MSP can be used experimentally in music analysis. The thesis concludes that investigating short term music excerpts might be interesting in music analysis. Due to the limitations of our short term memory, such short passages may reveal noteworthy aspects of music perception. 
It is also suggested that music theory could benefit from studying salience points and paying more attention to the sound of music.},
Address = {Oslo, Norway},
Author = {Jensenius, Alexander Refsum},
Date-Modified = {2012-06-06 08:03:57 +0000},
Keywords = {Music Cognition},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/Jensenius/Jensenius_2002.pdf},
School = {University of Oslo},
Title = {How Do We Recognize a Song in One Second? {T}he Importance of Salience and Sound in Music Perception},
Type = {Cand. philol. thesis},
Url = {http://urn.nb.no/URN:NBN:no-7119},
Webpdf = {http://urn.nb.no/URN:NBN:no-7119},
Year = {2002},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJC4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDAyLnBkZtIXCxgZV05TLmRhdGFPEQG6AAAAAAG6AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAMwroSSmVuc2VuaXVzXzIwMDIucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzDQ8Gi0jcAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAACUplbnNlbml1cwAAEAAIAADKPHEDAAAAEQAIAADBosQnAAAAAQAUAAzCugAG0+oABW3NAAUAyAAAvzEAAgBMTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBKZW5zZW5pdXM6AEplbnNlbml1c18yMDAyLnBkZgAOACYAEgBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADIALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADpVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9KZW5zZW5pdXMvSmVuc2VuaXVzXzIwMDIucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtQC6AMICgAKCAocCkgKbAqkCrQK0Ar0CwgLPAtIC5ALnAuwAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC7g==},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-7119}}

@mastersthesis{Jensenius:2005b,
Abstract = {This thesis presents my explorations into the field of multimedia performance, broadening the scope of my musical background and investigating the differences between various media and expressions. The thesis starts by presenting some of the theoretical foundations for this project, including multimodal perception, embodied cognition, interaction design, nonlinear thinking and hypermedia theory. This is followed by presentations of a series of creative projects where I have been exploring the boundaries between time and space in photography, live visuals and music. Examples are also shown of how game controllers can be used for artistic purposes, using the MultiControl software I created to communicate with the HID protocol. Finally, I present the Interactive Mass, a large-scale, networked, nonlinear, multimedia performance system intended for organizing a Christian service where the audience control both content and structure. The system was built around a server containing pictures, sounds, videos and text fragments. Using various multimedia stations and objects around the space, more content could easily be added during the mass, and it would form the content of what musicians, dancers and priests would be accessing. The result was a series of truly different Christian services performed at various locations in Norway, and the system proved stable and scalable, allowing interaction for groups from 10 to 250 people. The Interactive Mass proved as an efficient testbed for my thoughts about creating a hypermedia system for use in live performance, and extending the traditional ideas about body, time and space.},
Address = {G{\o}teborg, Sweden},
Author = {Jensenius, Alexander Refsum},
Date-Added = {2007-01-08 12:54:58 +0100},
Date-Modified = {2007-10-29 15:44:52 +0100},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/Jensenius/Jensenius_2005a.pdf},
School = {Chalmers Institute of Technology},
Title = {Towards Hypermedia Performance -- On the Boundaries Between Body, Time, Space},
Type = {M.Sc. Thesis},
Year = {2005},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA1YS5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDA1YS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMw42/6Jc1AAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAv+iJJQAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAwNWEucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADUAYQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAwNWEucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=}}

@misc{Jensenius:2009a,
Author = {Jensenius, Alexander Refsum and Nymoen, Kristian},
Date-Added = {2013-04-05 10:12:32 +0000},
Date-Modified = {2013-04-05 10:31:01 +0000},
Title = {Video{A}nalysis [Computer program]},
Url = {http://www.fourms.uio.no/software/},
Year = {2009},
Bdsk-Url-1 = {http://www.fourms.uio.no/software/}}

@misc{Jensenius:2012b,
Abstract = {Trodde du at du har full kontroll over kroppen din? Det har du ikke. P{\aa} UiO forskes det n{\aa} p{\aa} kroppens ukontrollerte bevegelser.},
Author = {Jensenius, Alexander Refsum and Nymoen, Kristian and Havnes, Heljar and Vold, Skjalg B{\o}hmer},
Date-Added = {2012-08-17 07:21:22 +0000},
Date-Modified = {2012-08-17 07:21:57 +0000},
Title = {Klarer ikke st{\aa} stille},
Year = {2012}}

@misc{Jensenius:2013c,
Author = {Jensenius, Alexander Refsum},
Date-Added = {2013-01-14 13:07:23 +0000},
Date-Modified = {2013-04-05 10:30:50 +0000},
Title = {Image{S}onifyer [Computer program]},
Url = {http://www.fourms.uio.no/software/},
Year = {2013},
Bdsk-Url-1 = {http://www.fourms.uio.no/software/}}

@periodical{Jensenius:2008,
Author = {Jensenius, Alexander Refsum},
Date-Added = {2011-09-03 16:56:29 +0000},
Date-Modified = {2012-07-02 09:07:16 +0000},
Journal = {Universitas},
Keywords = {intervju, jensenius},
Title = {Pensum},
Year = {2008},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA4Yi5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDA4Yi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxJPKiCBfAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAyogEPwAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAwOGIucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADgAYgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAwOGIucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=}}

@periodical{Jensenius:2010d,
Abstract = {Paradoksalt nok opplever jeg at det er enklere {\aa} samarbeide med kolleger, b{\aa}de kunstneriske og vitenskapelige, i Europa og Nord-Amerika, enn i Oslo by. Det skriver kronikkforfatteren, som vil lette samarbeidet mellom ulike vitenskaper, og mellom kunst og vitenskap.},
Author = {Jensenius, Alexander Refsum},
Date-Added = {2010-08-30 19:39:39 +0200},
Date-Modified = {2012-07-02 09:14:50 +0000},
Journal = {Forskerforum},
Pages = {32--33},
Title = {Oppmuntring til forskning p{\aa} tvers},
Volume = {7},
Year = {2010},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJC4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDEwLnBkZtIXCxgZV05TLmRhdGFPEQG6AAAAAAG6AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAMwroSSmVuc2VuaXVzXzIwMTAucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzE/cihuS0AAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAACUplbnNlbml1cwAAEAAIAADKPHEDAAAAEQAIAADIoZ0NAAAAAQAUAAzCugAG0+oABW3NAAUAyAAAvzEAAgBMTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBKZW5zZW5pdXM6AEplbnNlbml1c18yMDEwLnBkZgAOACYAEgBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADAALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADpVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9KZW5zZW5pdXMvSmVuc2VuaXVzXzIwMTAucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtQC6AMICgAKCAocCkgKbAqkCrQK0Ar0CwgLPAtIC5ALnAuwAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC7g==}}

@phdthesis{Jensenius:2007,
Abstract = {Body movement is integral to both performance and perception of music, and this dissertation suggests that we also think about music as movement. Based on ideas of embodied music cognition, it is argued that ecological knowledge of action-sound couplings guide our experience of music, both in perception and performance. Then follows a taxonomy of music-related body movements, before various observation studies of perceiver's music-movement correspondences are presented: air instrument performance, free dance to music, and sound-tracing. These studies showed that both novices and experts alike seem to associate various types of body movement with features in the musical sound. Knowledge from the observation studies was used in the exploration of artificial action-sound relationships through the development of various prototype music controllers, including the Cheapstick, music balls, and the Music Troll. This exploration showed that it is possible to create low-cost and human-friendly music controllers that may be both intuitive and creatively interesting. The last part of the dissertation presents tools and methods that have been developed throughout the project, including the Musical Gestures Toolbox for the graphical programming environment Max/MSP/Jitter; techniques for creating motion history images and motiongrams of video material; and development of the Gesture Description Interchange Format (GDIF) for streaming and storing music-related movement data. These tools may be seen as an answer to many of the research questions posed in the dissertation, and have facilitated the analysis of music-related movement and creation of artificial action-sound relationships in the project.},
Author = {Jensenius, Alexander Refsum},
Date-Added = {2007-01-08 12:56:05 +0100},
Date-Modified = {2012-11-20 12:00:36 +0000},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/Jensenius/Jensenius_2007b.pdf},
School = {University of Oslo},
Title = {Action–Sound: Developing Methods and Tools to Study Music-Related Body Movement},
Url = {http://urn.nb.no/URN:NBN:no-18922},
Webpdf = {http://urn.nb.no/URN:NBN:no-18922},
Year = {2007},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA3Yi5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDA3Yi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARIwTDqCb5AAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAw6gY6QAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAwN2IucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADcAYgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAwN2IucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-18922}}

@techreport{Jensenius:1997,
Author = {Jensenius, Alexander Refsum},
Date-Modified = {2011-12-14 10:06:41 +0000},
Institution = {Nesbru videreg{\aa}ende skole},
School = {Nesbru videreg{\aa}ende skole},
Title = {A Mechanical Investigation of the Stability and Collapsing Moment of a Standing, Hollow, Paper Cylinder},
Type = {Extended Essay},
Year = {1997},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJC4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18xOTk3LnBkZtIXCxgZV05TLmRhdGFPEQG6AAAAAAG6AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAMwroSSmVuc2VuaXVzXzE5OTcucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzC7MrDnWgAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAACUplbnNlbml1cwAAEAAIAADKPHEDAAAAEQAIAADKw4FIAAAAAQAUAAzCugAG0+oABW3NAAUAyAAAvzEAAgBMTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBKZW5zZW5pdXM6AEplbnNlbml1c18xOTk3LnBkZgAOACYAEgBKAGUAbgBzAGUAbgBpAHUAcwBfADEAOQA5ADcALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADpVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9KZW5zZW5pdXMvSmVuc2VuaXVzXzE5OTcucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtQC6AMICgAKCAocCkgKbAqkCrQK0Ar0CwgLPAtIC5ALnAuwAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC7g==}}

@techreport{Jensenius:1999,
Author = {Jensenius, Alexander Refsum},
Date-Modified = {2011-12-08 20:30:15 +0000},
Institution = {Institutt for musikkvitenskap, Universitetet i Oslo},
Keywords = {Music Psychology},
School = {University of Oslo},
Title = {Digitalisering av pianolyd – Noen problemomr{\aa}der, med vekt p{\aa} fysisk signal og menneskelig oppfatning},
Type = {Term Paper},
Year = {1999},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJC4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18xOTk5LnBkZtIXCxgZV05TLmRhdGFPEQG6AAAAAAG6AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAMwroSSmVuc2VuaXVzXzE5OTkucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzDJcXpXB8AAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAACUplbnNlbml1cwAAEAAIAADKPHEDAAAAEQAIAADF6U4PAAAAAQAUAAzCugAG0+oABW3NAAUAyAAAvzEAAgBMTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBKZW5zZW5pdXM6AEplbnNlbml1c18xOTk5LnBkZgAOACYAEgBKAGUAbgBzAGUAbgBpAHUAcwBfADEAOQA5ADkALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADpVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9KZW5zZW5pdXMvSmVuc2VuaXVzXzE5OTkucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtQC6AMICgAKCAocCkgKbAqkCrQK0Ar0CwgLPAtIC5ALnAuwAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC7g==}}

@techreport{Jensenius:2000,
Abstract = {Artikkelen starter med {\aa} gj{\o}re rede for de forskjellige lydene som kommer fra en datamaskin. Hovedfokuset rettes deretter mot de systemvarestyrte audiolydene, videre kalt systemlyder. Tre forskjellige sett med systemlyder fra Microsoft analyseres med tanke p{\aa} fysiske og musiske aspekter, f{\o}r forfatteren fors{\o}ker {\aa} forklare hvorfor fremveksten av slike lyder har v{\ae}rt s{\aa} stor de siste {\aa}rene. Deretter presenteres tre fiktive personers forhold til systemlyder. Artikkelen konkluderes med at systemlyder i dagens form forh{\aa}pentligvis er en forbig{\aa}ende trend og at produsentene heller b{\o}r konsentrere seg om lydapplikasjoner hvor lydkommunikasjon st{\aa}r sentralt.},
Author = {Jensenius, Alexander Refsum},
Date-Modified = {2010-11-24 08:40:46 +0100},
Institution = {University of Oslo},
Title = {Datamaskin og lyder – bruker og p{\aa}virkning},
Type = {Term Paper},
Year = {2000},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDAwYS5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDAwYS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMwzHF6XuTAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAxeltgwAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAwMGEucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADAAYQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAwMGEucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=}}

@techreport{Jensenius:2000b,
Abstract = {The paper starts with presenting some of the concepts behind digital audio compression, before describing some of the most popular sound formats available today: the different standards in the MPEG-family, RealAudio, ATRAC, MS Audio, SACD and DVD Audio. The author argues that there are lots of positive aspects of sound compression, but perhaps this overwhelming popularity will limit the development of new and better standards, like Super Audio CD or DVD Audio.},
Author = {Jensenius, Alexander Refsum},
Date-Modified = {2010-11-24 08:41:26 +0100},
Institution = {Department of Music and Theatre, University of Oslo},
Title = {MP3: Friend of the Youth or Enemy of the Sound? A discussion of different sound formats and problems with sound compression},
Type = {Term Paper},
Year = {2000},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJC4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDAwLnBkZtIXCxgZV05TLmRhdGFPEQG6AAAAAAG6AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAMwroSSmVuc2VuaXVzXzIwMDAucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzDKsXpeqsAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAACUplbnNlbml1cwAAEAAIAADKPHEDAAAAEQAIAADF6WybAAAAAQAUAAzCugAG0+oABW3NAAUAyAAAvzEAAgBMTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBKZW5zZW5pdXM6AEplbnNlbml1c18yMDAwLnBkZgAOACYAEgBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADAALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADpVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9KZW5zZW5pdXMvSmVuc2VuaXVzXzIwMDAucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtQC6AMICgAKCAocCkgKbAqkCrQK0Ar0CwgLPAtIC5ALnAuwAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC7g==}}

@techreport{Jensenius:2001,
Author = {Jensenius, Alexander Refsum},
Date-Added = {2011-09-06 09:21:55 +0000},
Date-Modified = {2012-08-15 13:53:32 +0000},
Institution = {University of Oslo},
Title = {Laser Dance and the Sound of Naked Piano},
Type = {Term paper},
Year = {2001},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJC4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDAxLnBkZtIXCxgZV05TLmRhdGFPEQG6AAAAAAG6AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAMwroSSmVuc2VuaXVzXzIwMDEucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzDPbhEL84AAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAACUplbnNlbml1cwAAEAAIAADKPHEDAAAAEQAIAAC4RCG+AAAAAQAUAAzCugAG0+oABW3NAAUAyAAAvzEAAgBMTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBKZW5zZW5pdXM6AEplbnNlbml1c18yMDAxLnBkZgAOACYAEgBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADEALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADpVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9KZW5zZW5pdXMvSmVuc2VuaXVzXzIwMDEucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtQC6AMICgAKCAocCkgKbAqkCrQK0Ar0CwgLPAtIC5ALnAuwAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC7g==}}

@techreport{Luciani:2006a,
Abstract = {This report presents the state of the art in “gesture format” and preliminary
propositions of the partners in the specification of a common low level format. It includes (1)
presentation of the cardinal points in research on gesture data, (2) state of the art in existing gesture
format (3) state of each partner as for encoding of gesture data, (4) description of the proposed
format, (5) feedback by each partner. },
Author = {Luciani, Annie and Evrard, Matthieu and Courousse, Damien and Castagne, Nicolas and Summers, Ian and Brady, Alan and Villella, Paolo and Salsedo, Fabio and Portillo, Otniel and Avizzano, Carlo Alberto and Raspolli, Mirko and Bergamasco, Massimo and Volpe, Gualtiero and Mazzarino, Barbara and Wanderley, Marcelo M. and Jensenius, Alexander Refsum and God{\o}y, Rolf Inge and Bardy, Benoit and Stoffregen, Thomas and De Poli, Giovanni and Degotzen, Amalia and Avanzini, F. and Roda, A. and Mion, L. and D’Inca and Trestino, C. and Pirro, D.},
Date-Added = {2007-06-29 09:10:49 +0200},
Date-Modified = {2007-09-14 10:50:05 +0200},
Institution = {IST-2004-002114-ENACTIVE Network of Excellence},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/Luciani/Luciani_2006a.pdf},
Number = {D.RD3.3.1},
Title = {{Report on Gesture Format. State of the Art. Partners' propositions}},
Type = {Deliverable 1},
Url = {http://www.enactivenetwork.org},
Year = {2006},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QIS4uL1BERnMvTHVjaWFuaS9MdWNpYW5pXzIwMDZhLnBkZtIXCxgZV05TLmRhdGFPEQGyAAAAAAGyAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAM62MRTHVjaWFuaV8yMDA2YS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzrZsDkBocAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAAB0x1Y2lhbmkAABAACAAAyjxxAwAAABEACAAAwOPqZwAAAAEAFAAM62MABtPqAAVtzQAFAMgAAL8xAAIASU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoATHVjaWFuaToATHVjaWFuaV8yMDA2YS5wZGYAAA4AJAARAEwAdQBjAGkAYQBuAGkAXwAyADAAMAA2AGEALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADdVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9MdWNpYW5pL0x1Y2lhbmlfMjAwNmEucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALIAtwC/AnUCdwJ8AocCkAKeAqICqQKyArcCxALHAtkC3ALhAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAuM=},
Bdsk-Url-1 = {http://www.enactivenetwork.org}}

@techreport{Jensenius:2007a,
Annote = {\href{http://www.cost287.org/documentation/stsms/report/jensenius_report.pdf}{http://www.cost287.org/documentation/stsms/report/jensenius\_report.pdf} },
Author = {Jensenius, Alexander Refsum},
Date-Added = {2007-02-26 22:11:55 -0500},
Date-Modified = {2011-12-08 21:54:13 +0000},
Institution = {EU COST 287 Action Gesture Controlled Audio Systems (ConGAS)},
Local-Url = {file://localhost/Users/alexanje/Reference/Bibdesk/Jensenius/Jensenius_2007.pdf},
Title = {{GDIF} Development at {McGill}},
Type = {Short Term Scientific Mission Report},
Url = {http://urn.nb.no/URN:NBN:no-21768},
Webpdf = {http://urn.nb.no/URN:NBN:no-21768},
Year = {2007},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJC4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA3LnBkZtIXCxgZV05TLmRhdGFPEQG6AAAAAAG6AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADKPI0jSCsAAAAMwroSSmVuc2VuaXVzXzIwMDcucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzENcINrxkAAAAAAAAAAAABAAMAAAkgAAAAAAAAAAAAAAAAAAAACUplbnNlbml1cwAAEAAIAADKPHEDAAAAEQAIAADCDaEJAAAAAQAUAAzCugAG0+oABW3NAAUAyAAAvzEAAgBMTWFjaW50b3NoIEhEOlVzZXJzOgBhbGV4YW5qZToAUmVmZXJlbmNlOgBQREZzOgBKZW5zZW5pdXM6AEplbnNlbml1c18yMDA3LnBkZgAOACYAEgBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADcALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADpVc2Vycy9hbGV4YW5qZS9SZWZlcmVuY2UvUERGcy9KZW5zZW5pdXMvSmVuc2VuaXVzXzIwMDcucGRmABMAAS8AABUAAgAP//8AAIAG0hscHR5aJGNsYXNzbmFtZVgkY2xhc3Nlc11OU011dGFibGVEYXRhox0fIFZOU0RhdGFYTlNPYmplY3TSGxwiI1xOU0RpY3Rpb25hcnmiIiBfEA9OU0tleWVkQXJjaGl2ZXLRJidUcm9vdIABAAgAEQAaACMALQAyADcAQABGAE0AVQBgAGcAagBsAG4AcQBzAHUAdwCEAI4AtQC6AMICgAKCAocCkgKbAqkCrQK0Ar0CwgLPAtIC5ALnAuwAAAAAAAACAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAC7g==},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-21768}}

@techreport{Jensenius:2009c,
Author = {Jensenius, Alexander Refsum},
Date-Added = {2010-05-21 19:06:22 +0200},
Date-Modified = {2011-12-08 21:53:51 +0000},
Institution = {EU COST Action Sonic Interaction Design (SID)},
Title = {Motion capture studies of action-sound couplings in sonic interaction},
Type = {Short Term Scientific Mission Report},
Url = {http://urn.nb.no/URN:NBN:no-26163},
Webpdf = {http://urn.nb.no/URN:NBN:no-26163},
Year = {2009},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDA5Yi5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDA5Yi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxKHGwq98AAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAxsKTXAAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAwOWIucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAwADkAYgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAwOWIucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://urn.nb.no/URN:NBN:no-26163}}

@techreport{Jensenius:2011i,
Address = {Oslo, Norway},
Author = {Jensenius, Alexander Refsum and Forsbakk, Anette and Aarsten, Alison Bullock and Skogstad, St{\aa}le Andreas van Dorp},
Date-Added = {2011-05-22 10:47:45 +0200},
Date-Modified = {2012-08-21 08:05:23 +0000},
Institution = {University of Oslo},
Title = {Program book of the {International Conference on New Interfaces for Musical Expression}},
Url = {http://www.nime2011.org/wp-content/uploads/2010/09/NIME2011_Program_Book.pdf},
Webpdf = {http://www.nime2011.org/wp-content/uploads/2010/09/NIME2011_Program_Book.pdf},
Year = {2011},
Bdsk-File-1 = {YnBsaXN0MDDUAQIDBAUGJCVYJHZlcnNpb25YJG9iamVjdHNZJGFyY2hpdmVyVCR0b3ASAAGGoKgHCBMUFRYaIVUkbnVsbNMJCgsMDxJXTlMua2V5c1pOUy5vYmplY3RzViRjbGFzc6INDoACgAOiEBGABIAFgAdccmVsYXRpdmVQYXRoWWFsaWFzRGF0YV8QJS4uL1BERnMvSmVuc2VuaXVzL0plbnNlbml1c18yMDExYS5wZGbSFwsYGVdOUy5kYXRhTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyjyNI0grAAAADMK6E0plbnNlbml1c18yMDExYS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMxnHKgt4mAAAAAAAAAAAAAQADAAAJIAAAAAAAAAAAAAAAAAAAAAlKZW5zZW5pdXMAABAACAAAyjxxAwAAABEACAAAyoLCBgAAAAEAFAAMwroABtPqAAVtzQAFAMgAAL8xAAIATU1hY2ludG9zaCBIRDpVc2VyczoAYWxleGFuamU6AFJlZmVyZW5jZToAUERGczoASmVuc2VuaXVzOgBKZW5zZW5pdXNfMjAxMWEucGRmAAAOACgAEwBKAGUAbgBzAGUAbgBpAHUAcwBfADIAMAAxADEAYQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAO1VzZXJzL2FsZXhhbmplL1JlZmVyZW5jZS9QREZzL0plbnNlbml1cy9KZW5zZW5pdXNfMjAxMWEucGRmAAATAAEvAAAVAAIAD///AACABtIbHB0eWiRjbGFzc25hbWVYJGNsYXNzZXNdTlNNdXRhYmxlRGF0YaMdHyBWTlNEYXRhWE5TT2JqZWN00hscIiNcTlNEaWN0aW9uYXJ5oiIgXxAPTlNLZXllZEFyY2hpdmVy0SYnVHJvb3SAAQAIABEAGgAjAC0AMgA3AEAARgBNAFUAYABnAGoAbABuAHEAcwB1AHcAhACOALYAuwDDAocCiQKOApkCogKwArQCuwLEAskC1gLZAusC7gLzAAAAAAAAAgEAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAvU=},
Bdsk-Url-1 = {http://www.nime2011.org/proceedings/},
Bdsk-Url-2 = {http://www.nime2011.org/wp-content/uploads/2010/09/NIME2011_Program_Book.pdf}}

[/bibtex]