<div id="app">
<aside>
<h1 class="nobottom">Dynamically Generated Alt Text<br>
<span>With Azure's Computer Vision API</span></h1>
<p><a href="https://aka.ms/Uzrshc" target="_blank">Check out the docs <svg class="arrow" version="1.1" xmlns="http://www.w3.org/2000/svg" width="12" height="12" viewBox="0 0 32 32" aria-labelledby="arrow">
<title id="arrow">arrow-right</title>
<path d="M31 16l-15-15v9h-16v12h16v9z"></path>
</svg></a></p>
<hr>
<section v-if="!noText2">
<transition name="fade" mode="out-in">
<div v-if="desc.length === 0">
<div class="upload">
<h2 class="select" ref="selectimg">Select an image here: </h2>
<input type="file" name="file" id="file" class="inputfile" @change="fileUpload" />
<label for="file" aria-label="upload image">
<svg xmlns="http://www.w3.org/2000/svg" width="45" height="45" viewBox="0 0 32 32" aria-labelledby="plus" role="presentation">
<title id="plus">Plus Sign</title>
<circle cx="15" cy="15" r="15" fill="#bf310b"/>
<line x1="15" x2="15" y1="10" y2="20" fill="none" stroke-weight="2" stroke="#ffffff"/>
<line x1="10" x2="20" y1="15" y2="15" fill="none" stroke-weight="2" stroke="#ffffff"/>
</svg>
</label>
</div>
<!--upload-->
<label for="url">Or enter a url:</label><br>
<input type="text" id="url" v-model.lazy="image" @keyup.enter="visionReq" />
<button class="useone" @click="useMine">Or use one of mine</button>
<p v-if="image.length > 0" class="load">Loading
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 32 32" aria-labelledby="loading" role="presentation" class="loading">
<title id="loading">spinner9</title>
<path fill="#ffffff" d="M16 0c-8.711 0-15.796 6.961-15.995 15.624 0.185-7.558 5.932-13.624 12.995-13.624 7.18 0 13 6.268 13 14 0 1.657 1.343 3 3 3s3-1.343 3-3c0-8.837-7.163-16-16-16zM16 32c8.711 0 15.796-6.961 15.995-15.624-0.185 7.558-5.932 13.624-12.995 13.624-7.18 0-13-6.268-13-14 0-1.657-1.343-3-3-3s-3 1.343-3 3c0 8.837 7.163 16 16 16z"></path>
</svg></p>
</div>
<div v-else>
<button @click="removeImage">Try it again!</button>
</div>
</transition>
</section>
<section v-if="noText && noText2" class="fail">
<p>Nothing was detected!<br> Sorry about that, care to...</p>
<button @click="removeImage">Try again!</button>
</section>
<hr>
<div @click="moreInfo = !moreInfo" class="more">
<button class="more" v-if="!moreInfo">More info...</button>
<button class="more" v-else>Less info</button>
</div>
<transition name="fade">
<p v-if="moreInfo">I kept hearing about machine learning being used for evil and wanted to use it for something good. Social media posts typically don't have a way to enter alt text and the only users I see that reliably remember to add descriptions to the post are
accessibility experts or blind people. Hopefully this allows good alt text to be a bit more ubiquitous. You can find <a href="https://aka.ms/Uzrshc" target="_blank">more information on how Azure's Computer Vision API works</a>, as well as how
to use it in your own projects here.</p>
</transition>
</aside>
<main>
<transition name="fade">
<div v-if="Object.keys(desc).length > 0">
<h3>Alt text:</h3>
<p role="status"><span class="screen-reader-only">Generated alt text:</span> {{ combinedText | capitalize }}</p>
<img :src="image" :alt="combinedText" ref="alttextimg" />
</div>
</transition>
</main>
</div>
$red: #bf310b;
body {
background: #071b6d;
color: white;
padding: 20px;
}
* {
font-family: "Barlow", helvetica, sans-serif;
}
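// Visually hides content while keeping it available to screen readers.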
.screen-reader-only {
position: absolute;
left: -10000px;
top: auto;
width: 1px;
height: 1px;
overflow: hidden;
}
button {
background: $red;
color: white;
border: none;
border-radius: 3px;
padding: 5px 10px;
cursor: pointer;
font-style: italic;
font-weight: 300;
font-family: "Lato", sans-serif;
margin: 10px 0;
&.more {
cursor: pointer;
color: #95a2da;
background: none;
border: 0;
font: inherit;
line-height: normal;
overflow: visible;
padding: 0;
-webkit-appearance: button; /* for input */
-webkit-user-select: none; /* for button */
-moz-user-select: none;
-ms-user-select: none;
}
&.useone {
cursor: pointer;
background: none;
border: 0;
font: inherit;
line-height: normal;
overflow: visible;
padding: 0;
-webkit-appearance: button; /* for input */
-webkit-user-select: none; /* for button */
-moz-user-select: none;
-ms-user-select: none;
margin-top: 18px;
color: #e8d499;
font-size: 18px;
transition: 0.25s all ease;
&:hover {
color: #ef9d0e;
}
}
}
label {
font-size: 18px;
}
input {
margin: 10px 0 0;
}
p,
h3 {
margin: 5px 0 0;
}
h1 span {
font-weight: 300;
font-size: 24px;
display: block;
margin: 15px 0;
}
hr {
opacity: 0.2;
width: 100%;
margin-top: 25px;
}
a {
text-decoration: none;
color: #fec802;
}
//base layout start
#app {
display: flex;
aside {
width: 300px;
display: flex;
flex-direction: column;
}
main {
flex-grow: 2;
}
}
@media (min-width: 600px) {
aside {
height: 85vh;
}
main {
height: 85vh;
}
}
@media (max-width: 600px) {
#app {
display: block;
}
aside {
width: 100vw;
}
main {
width: 85vw;
height: 300px;
margin: 0;
}
#canvas {
width: 85%;
margin-left: 0;
margin-top: 20px;
}
.nomobile {
display: none;
}
}
//base layout end
.load {
margin: 15px 0;
}
.fail {
margin: 15px 0;
}
img {
max-width: 400px;
max-height: 600px;
width: 90%;
border: 1px solid #777;
margin: 15px 0;
}
.arrow {
margin-left: 2px;
path {
fill: #fec802;
}
}
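// Visually hide the native file input (while keeping it focusable); the styled label acts as the trigger.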
.inputfile {
width: 0.1px;
height: 0.1px;
opacity: 0;
overflow: hidden;
position: absolute;
z-index: -1;
}
.inputfile + label {
margin: 4px 10px;
display: inline-block;
transition: 0.3s all ease-out;
}
.inputfile:focus + label circle,
.inputfile + label circle:hover {
fill: #f14619;
}
.inputfile + label {
cursor: pointer; /* "hand" cursor */
}
.inputfile:focus + label {
outline: 1px dotted #000;
outline: -webkit-focus-ring-color auto 5px;
}
.inputfile + label * {
pointer-events: none;
}
//main
main {
padding: 20px;
background: #1f3bad;
margin: 20px;
height: 85vh;
div {
width: 100%;
height: 100%;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
}
}
main p {
width: 70%;
text-align: center;
}
.upload {
display: flex;
}
//utility
.nobottom {
margin-bottom: 0;
}
.bottom10 {
margin-bottom: 10px;
}
.select {
margin: 10px 0 18px;
font-weight: 300;
}
.loading {
animation: load 1s infinite both;
transform-origin: 50% 50%;
}
@keyframes load {
35% {
opacity: 0.35;
}
100% {
transform: rotate(360deg);
}
}
.fade-enter-active {
transition: opacity 0.35s ease-out;
}
.fade-leave-active {
transition: opacity 0.2s ease-in;
}
.fade-enter,
.fade-leave-to {
opacity: 0;
}
new Vue({
el: "#app",
data() {
return {
textdesc: {},
desc: "",
image: "",
combinedText: "",
noText: false,
noText2: false,
moreInfo: false
};
},
filters: {
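// Uppercase the first letter of the generated alt text for display.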
capitalize(string) {
return string.charAt(0).toUpperCase() + string.slice(1);
}
},
computed: {
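// Flattens the OCR response (regions > lines > words) into a single string of detected text.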
alttext() {
if (Object.keys(this.textdesc).length === 0) return "";
let textarr = [];
this.textdesc.regions.forEach(region => {
region.lines.forEach(line => {
line.words.forEach(word => {
textarr.push(word.text);
});
});
});
return textarr.join(" ");
}
},
methods: {
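// Builds a POST request to the Computer Vision endpoint: image URLs are sent as JSON,
// uploaded files as raw binary (application/octet-stream).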
apiReq(params, urlPath) {
let uribase = `https://westcentralus.api.cognitive.microsoft.com/vision/v1.0/${urlPath}`;
let data, contentType;
if (typeof this.image === "string") {
data = { url: this.image };
contentType = "application/json";
} else {
data = this.image;
contentType = "application/octet-stream";
}
return axios({
method: "post",
url: uribase,
data: data,
params: params,
headers: {
"Content-Type": contentType,
"Ocp-Apim-Subscription-Key": "5cd9392bcf6b4303920916aef005fd37"
},
validateStatus: function(status) {
return status < 500;
}
});
},
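// Fires two requests in parallel: OCR for any text in the image, and "analyze" for a caption.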
visionReq() {
let param1 = {
language: "unk",
"detectOrientation ": "true"
};
let param2 = {
visualFeatures: "Categories,Description,Color",
details: "",
language: "en"
};
this.apiReq(param1, "ocr").then(response => {
if (response.status === 200) {
this.textdesc = response.data;
} else {
this.noText = true;
}
});
this.apiReq(param2, "analyze").then(response => {
if (response.status === 200) {
this.desc = response.data.description.captions[0].text;
} else {
this.noText2 = true;
}
});
},
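// Handles the file input: stores the file, builds a preview, and kicks off the API requests.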
fileUpload(e) {
var files = e.target.files || e.dataTransfer.files;
if (!files.length) return;
this.image = files[0];
this.createImage();
this.visionReq();
},
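// Runs the demo against a sample image instead of user input.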
useMine() {
this.image =
"https://s3-us-west-2.amazonaws.com/s.cdpn.io/28963/heygirl.jpg";
this.visionReq();
},
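// Reads the uploaded file as a data URL so it can be shown in the <img> preview.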
createImage() {
const reader = new FileReader();
reader.onload = e => {
this.image = e.target.result;
};
reader.readAsDataURL(this.image);
},
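// Resets all state so another image can be tried, then returns focus to the upload prompt.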
removeImage() {
this.image = "";
this.noText = false;
this.noText2 = false;
this.textdesc = {};
this.desc = "";
this.combinedText = "";
setTimeout(() => {
this.$refs.selectimg.focus();
}, 1000);
}
},
watch: {
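// Whenever a caption comes back, combine it with any detected text into the final alt text.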
desc() {
if (this.desc === undefined) {
this.combinedText = "";
} else if (this.desc.length > 0) {
if (this.alttext !== undefined && this.alttext.length > 0) {
this.combinedText = `${this.desc}, the text says: "${this.alttext}"`;
} else {
this.combinedText = `${this.desc}`;
}
}
}
}
});